Merge branch 'master' of https://github.com/DSpace/DSpace into DS-3422

# Conflicts:
#	dspace-xmlui/src/main/java/org/dspace/app/xmlui/aspect/submission/submit/AccessStepUtil.java
#	dspace-xmlui/src/main/java/org/dspace/app/xmlui/objectmanager/ItemAdapter.java
Andrea Bollini
2017-02-12 16:17:07 +01:00
23 changed files with 318 additions and 141 deletions

.gitignore

@@ -6,6 +6,7 @@ tags
 ## Ignore project files created by Eclipse
 .settings/
+/bin/
 .project
 .classpath


@@ -53,6 +53,8 @@ We welcome everyone to participate in these lists:
 * [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error).
 * [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list
+Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)
 Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support
 DSpace also has an active service provider network. If you'd rather hire a service provider to


@@ -715,6 +715,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
         {
             value = "";
         }
+        else
+        {
+            value = value.trim();
+        }
         // //getElementData(n, "element");
         String element = getAttributeValue(n, "element");
         String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
@@ -736,8 +740,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
         {
             qualifier = null;
         }
-        if (!isTest)
+        // only add metadata if it is no test and there is an actual value
+        if (!isTest && !value.equals(""))
         {
             itemService.addMetadata(c, i, schema, element, qualifier, language, value);
         }
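
Taken together, the two hunks above change the importer so that each metadata value is trimmed and whitespace-only values are never added. A minimal stand-alone sketch of the rule (hypothetical helper, not the committed code):

    // Hypothetical demo of the new import rule: trim first, then skip the
    // itemService.addMetadata(...) call entirely when nothing is left.
    public class TrimRuleDemo {
        static boolean shouldAdd(String raw) {
            String value = (raw == null) ? "" : raw.trim();
            return !value.equals("");
        }

        public static void main(String[] args) {
            System.out.println(shouldAdd("  Rome  ")); // true  -> "Rome" gets added
            System.out.println(shouldAdd("   "));      // false -> no empty metadata row
            System.out.println(shouldAdd(null));       // false
        }
    }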


@@ -218,9 +218,9 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
     }
     /**
-     * gets ID for Group referred to by this policy
+     * gets the Group referred to by this policy
      *
-     * @return groupID, or null if no group set
+     * @return group, or null if no group set
      */
     public Group getGroup()
     {
@@ -228,7 +228,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
     }
     /**
-     * sets ID for Group referred to by this policy
+     * sets the Group referred to by this policy
      * @param epersonGroup Group
      */
     public void setGroup(Group epersonGroup)


@@ -145,17 +145,14 @@ public final class ResultsPruner
             throw new IllegalStateException("Problem parsing duration: "
                     + e.getMessage(), e);
         }
-        ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
-        if(code == null)
-        {
-            throw new IllegalStateException("Checksum result code not found: " + resultCode);
-        }
-        if ("default".equals(resultCode))
-        {
+        if ("default".equals(resultCode)) {
             rp.setDefaultDuration(duration);
-        }
-        else
-        {
+        } else {
+            ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
+            if (code == null) {
+                throw new IllegalStateException("Checksum result code not found: " + resultCode);
+            }
             rp.addInterested(code, duration);
         }
     }
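
The reorder matters because, assuming ChecksumResultCode is a standard Java enum, valueOf() throws IllegalArgumentException for an unknown name such as "default" rather than returning null, so the old call would fail before the "default" branch could run; valueOf() now only ever sees real result-code names. A tiny illustration of that enum behaviour (hypothetical enum):

    // Hypothetical enum; standard Enum.valueOf never returns null.
    enum Code { CHECKSUM_MATCH, BITSTREAM_NOT_FOUND }

    public class ValueOfDemo {
        public static void main(String[] args) {
            System.out.println(Code.valueOf("CHECKSUM_MATCH")); // prints CHECKSUM_MATCH
            Code c = Code.valueOf("default"); // throws IllegalArgumentException
        }
    }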


@@ -254,13 +254,13 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
             bundle.getBitstreams().remove(bitstream);
         }
-        // Remove policies
-        authorizeService.removeAllPolicies(context, bitstream);
         // Remove bitstream itself
         bitstream.setDeleted(true);
         update(context, bitstream);
+        // Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights)
+        authorizeService.removeAllPolicies(context, bitstream);
         //Remove all bundles from the bitstream object, clearing the connection in 2 ways
         bundles.clear();
     }


@@ -19,8 +19,6 @@ import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.List;
-import org.apache.log4j.Logger;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.*;
 import org.dspace.content.factory.ContentServiceFactory;
@@ -28,6 +26,8 @@ import org.dspace.content.service.BitstreamService;
 import org.dspace.curate.AbstractCurationTask;
 import org.dspace.curate.Curator;
 import org.dspace.curate.Suspendable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 /** ClamScan.java
  *
@@ -55,7 +55,7 @@ public class ClamScan extends AbstractCurationTask
     protected final String SCAN_FAIL_MESSAGE = "Error encountered using virus service - check setup";
     protected final String NEW_ITEM_HANDLE = "in workflow";
-    private static Logger log = Logger.getLogger(ClamScan.class);
+    private static final Logger log = LoggerFactory.getLogger(ClamScan.class);
     protected String host = null;
     protected int port = 0;
@@ -234,18 +234,18 @@ public class ClamScan extends AbstractCurationTask
         }
     }
-    /** scan
-     *
+    /** A buffer to hold chunks of an input stream to be scanned for viruses. */
+    final byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];
+    /**
      * Issue the INSTREAM command and return the response to
-     * and from the clamav daemon
+     * and from the clamav daemon.
      *
-     * @param the bitstream for reporting results
-     * @param the InputStream to read
-     * @param the item handle for reporting results
+     * @param bitstream the bitstream for reporting results
+     * @param inputstream the InputStream to read
+     * @param itemHandle the item handle for reporting results
      * @return a ScanResult representing the server response
+     * @throws IOException if IO error
      */
-    final byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];;
     protected int scan(Bitstream bitstream, InputStream inputstream, String itemHandle)
     {
         try
@@ -254,7 +254,7 @@ public class ClamScan extends AbstractCurationTask
         }
         catch (IOException e)
         {
-            log.error("Error writing INSTREAM command . . .");
+            log.error("Error writing INSTREAM command", e);
             return Curator.CURATE_ERROR;
         }
         int read = DEFAULT_CHUNK_SIZE;
@@ -266,7 +266,7 @@ public class ClamScan extends AbstractCurationTask
         }
         catch (IOException e)
         {
-            log.error("Failed attempting to read the InputStream . . . ");
+            log.error("Failed attempting to read the InputStream", e);
             return Curator.CURATE_ERROR;
         }
         if (read == -1)
@@ -280,7 +280,7 @@ public class ClamScan extends AbstractCurationTask
         }
         catch (IOException e)
         {
-            log.error("Could not write to the socket . . . ");
+            log.error("Could not write to the socket", e);
             return Curator.CURATE_ERROR;
         }
     }
@@ -291,7 +291,7 @@ public class ClamScan extends AbstractCurationTask
         }
         catch (IOException e)
         {
-            log.error("Error writing zero-length chunk to socket") ;
+            log.error("Error writing zero-length chunk to socket", e) ;
            return Curator.CURATE_ERROR;
         }
         try
@@ -301,7 +301,7 @@ public class ClamScan extends AbstractCurationTask
         }
         catch (IOException e)
         {
-            log.error( "Error reading result from socket");
+            log.error( "Error reading result from socket", e);
             return Curator.CURATE_ERROR;
         }
@@ -309,7 +309,7 @@ public class ClamScan extends AbstractCurationTask
         {
             String response = new String(buffer, 0, read);
             logDebugMessage("Response: " + response);
-            if (response.indexOf("FOUND") != -1)
+            if (response.contains("FOUND"))
             {
                 String itemMsg = "item - " + itemHandle + ": ";
                 String bsMsg = "bitstream - " + bitstream.getName() +
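
For background on what scan() drives: clamd's INSTREAM protocol expects each chunk to be prefixed with its length as a 4-byte big-endian integer, with a zero-length chunk marking end of stream, after which the daemon replies with a line such as "stream: OK" or one containing "FOUND". A self-contained sketch of that framing (hypothetical helper, not the committed code):

    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.io.InputStream;

    class InstreamFraming {
        // Stream data to clamd in INSTREAM framing: [4-byte big-endian length][chunk]...
        static void streamToClamd(InputStream in, DataOutputStream toClamd, int chunkSize)
                throws IOException {
            byte[] buffer = new byte[chunkSize];
            int read;
            while ((read = in.read(buffer)) != -1) {
                toClamd.writeInt(read);        // DataOutputStream writes big-endian
                toClamd.write(buffer, 0, read);
            }
            toClamd.writeInt(0);               // zero-length chunk ends the stream
            toClamd.flush();                   // clamd now replies, e.g. "stream: OK"
        }
    }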


@@ -52,12 +52,12 @@ public class Curator
     // transaction scopes
     public static enum TxScope { OBJECT, CURATION, OPEN };
-    private static Logger log = Logger.getLogger(Curator.class);
+    private static final Logger log = Logger.getLogger(Curator.class);
-    protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<Context>();
+    protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<>();
-    protected Map<String, TaskRunner> trMap = new HashMap<String, TaskRunner>();
+    protected Map<String, TaskRunner> trMap = new HashMap<>();
-    protected List<String> perfList = new ArrayList<String>();
+    protected List<String> perfList = new ArrayList<>();
     protected TaskQueue taskQ = null;
     protected String reporter = null;
     protected Invoked iMode = null;
@@ -180,8 +180,12 @@ public class Curator
      * Performs all configured tasks upon object identified by id. If
      * the object can be resolved as a handle, the DSO will be the
      * target object.
      *
-     * @param c a Dpace context
+     * <p>
+     * Note: this method has the side-effect of setting this instance's Context
+     * reference. The setting is retained on return.
+     *
+     * @param c a DSpace context
      * @param id an object identifier
      * @throws IOException if IO error
      */
@@ -233,9 +237,10 @@ public class Curator
      * <P>
      * Note: Site-wide tasks will default to running as
      * an Anonymous User unless you call the Site-wide task
-     * via the 'curate(Context,String)' method with an
+     * via the {@link curate(Context,String)} or
+     * {@link #curate(Context, DSpaceObject)} method with an
      * authenticated Context object.
      *
      * @param dso the DSpace object
      * @throws IOException if IO error
      */
@@ -268,7 +273,26 @@ public class Curator
             }
         }
     }
+    /**
+     * Performs all configured tasks upon DSpace object
+     * (Community, Collection or Item).
+     *
+     * <p>
+     * Note: this method has the side-effect of setting this instance's Context
+     * reference. The setting is retained on return.
+     *
+     * @param c session context in which curation takes place.
+     * @param dso the single object to be curated.
+     * @throws java.io.IOException passed through.
+     */
+    public void curate(Context c, DSpaceObject dso)
+            throws IOException
+    {
+        curationCtx.set(c);
+        curate(dso);
+    }
     /**
      * Places a curation request for the object identified by id on a
      * managed queue named by the queueId.
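
A usage sketch for the new overload, mirroring the UploadStep change later in this commit: passing the session Context lets tasks run as the authenticated user instead of anonymous (assumes a Context and Item are already in scope):

    import java.io.IOException;
    import org.dspace.content.Item;
    import org.dspace.core.Context;
    import org.dspace.curate.Curator;

    class CurateUsageSketch {
        static int virusScan(Context context, Item item) throws IOException {
            Curator curator = new Curator();
            // The new overload stores the Context in the thread-local before curating.
            curator.addTask("vscan").curate(context, item);
            return curator.getStatus("vscan");
        }
    }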


@@ -25,13 +25,17 @@ import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
 import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
 import org.apache.solr.client.solrj.request.CoreAdminRequest;
+import org.apache.solr.client.solrj.request.LukeRequest;
 import org.apache.solr.client.solrj.response.FacetField;
+import org.apache.solr.client.solrj.response.LukeResponse;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RangeFacet;
+import org.apache.solr.client.solrj.response.SolrPingResponse;
 import org.apache.solr.client.solrj.util.ClientUtils;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.luke.FieldFlag;
 import org.apache.solr.common.params.*;
 import org.dspace.content.*;
 import org.dspace.content.Collection;
@@ -72,7 +76,7 @@ import java.util.*;
 public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBean
 {
     private static final Logger log = Logger.getLogger(SolrLoggerServiceImpl.class);
+    private static final String MULTIPLE_VALUES_SPLITTER = "|";
     protected HttpSolrServer solr;
     public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
@@ -84,7 +88,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     protected boolean useProxies;
     private static List<String> statisticYearCores = new ArrayList<String>();
+    private static boolean statisticYearCoresInit = false;
     @Autowired(required = true)
     protected BitstreamService bitstreamService;
     @Autowired(required = true)
@@ -126,28 +131,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
         try
         {
             server = new HttpSolrServer(configurationService.getProperty("solr-statistics.server"));
-            //Attempt to retrieve all the statistic year cores
-            File solrDir = new File(configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator);
-            File[] solrCoreFiles = solrDir.listFiles(new FileFilter() {
-                @Override
-                public boolean accept(File file) {
-                    //Core name example: statistics-2008
-                    return file.getName().matches("statistics-\\d\\d\\d\\d");
-                }
-            });
-            //Base url should like : http://localhost:{port.number}/solr
-            String baseSolrUrl = server.getBaseURL().replace("statistics", "");
-            for (File solrCoreFile : solrCoreFiles) {
-                log.info("Loading core with name: " + solrCoreFile.getName());
-                createCore(server, solrCoreFile.getName());
-                //Add it to our cores list so we can query it !
-                statisticYearCores.add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName());
-            }
-            //Also add the core containing the current year !
-            statisticYearCores.add(server.getBaseURL().replace("http://", "").replace("https://", ""));
         } catch (Exception e) {
             log.error(e.getMessage(), e);
         }
@@ -201,6 +184,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
         {
             return;
         }
+        initSolrYearCores();
         try
@@ -240,6 +224,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
         if (solr == null || locationService == null) {
             return;
         }
+        initSolrYearCores();
         try {
             SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor,
@@ -266,7 +251,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
             log.error(e.getMessage(), e);
         }
     }
     /**
      * Returns a solr input document containing common information about the statistics
@@ -465,6 +449,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     {
         SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser);
         if (solrDoc == null) return;
+        initSolrYearCores();
         for (String query : queries) {
             solrDoc.addField("query", query);
@@ -511,6 +496,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     @Override
     public void postWorkflow(UsageWorkflowEvent usageWorkflowEvent) throws SQLException {
+        initSolrYearCores();
         try {
             SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null);
@@ -680,7 +666,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     /**
      * Override to manage pages of documents
-     *
      * @param docs
      *            a list of Solr documents
      * @throws IOException
@@ -1247,8 +1232,12 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
         yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
         yearQueryParams.put(CommonParams.WT, "csv");
+        //Tell SOLR how to escape and separate the values of multi-valued fields
+        yearQueryParams.put("csv.escape", "\\");
+        yearQueryParams.put("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);
         //Start by creating a new core
-        String coreName = "statistics-" + dcStart.getYear();
+        String coreName = "statistics-" + dcStart.getYearUTC();
         HttpSolrServer statisticsYearServer = createCore(solr, coreName);
         System.out.println("Moving: " + totalRecords + " into core " + coreName);
@@ -1263,7 +1252,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
         HttpResponse response = new DefaultHttpClient().execute(get);
         InputStream csvInputstream = response.getEntity().getContent();
         //Write the csv ouput to a file !
-        File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." + i + ".csv");
+        File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYearUTC() + "." + i + ".csv");
         FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
         filesToUpload.add(csvFile);
@@ -1271,16 +1260,26 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
             yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
         }
+        Set<String> multivaluedFields = getMultivaluedFieldNames();
         for (File tempCsv : filesToUpload) {
             //Upload the data in the csv files to our new solr core
             ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
             contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
+            contentStreamUpdateRequest.setParam("escape", "\\");
             contentStreamUpdateRequest.setParam("skip", "_version_");
             contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
             contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");
+            //Add parsing directives for the multivalued fields so that they are stored as separate values instead of one value
+            for (String multivaluedField : multivaluedFields) {
+                contentStreamUpdateRequest.setParam("f." + multivaluedField + ".split", Boolean.TRUE.toString());
+                contentStreamUpdateRequest.setParam("f." + multivaluedField + ".separator", MULTIPLE_VALUES_SPLITTER);
+            }
             statisticsYearServer.request(contentStreamUpdateRequest);
         }
         statisticsYearServer.commit(true, true);
@@ -1297,14 +1296,56 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     protected HttpSolrServer createCore(HttpSolrServer solr, String coreName) throws IOException, SolrServerException {
         String solrDir = configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator;
         String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
+        //DS-3458: Test to see if a solr core already exists. If it exists, return that server. Otherwise create a new one.
+        HttpSolrServer returnServer = new HttpSolrServer(baseSolrUrl + "/" + coreName);
+        try {
+            SolrPingResponse ping = returnServer.ping();
+            log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus()));
+            return returnServer;
+        } catch(Exception e) {
+            log.debug(String.format("Ping of Solr Core [%s] Failed with [%s]. New Core Will be Created", coreName, e.getClass().getName()));
+        }
+        //Unfortunately, this class is documented as "experimental and subject to change" on the Lucene website.
+        //http://lucene.apache.org/solr/4_4_0/solr-solrj/org/apache/solr/client/solrj/request/CoreAdminRequest.html
         CoreAdminRequest.Create create = new CoreAdminRequest.Create();
         create.setCoreName(coreName);
+        //The config files for a statistics shard reside wihtin the statistics repository
         create.setInstanceDir("statistics");
         create.setDataDir(solrDir + coreName + File.separator + "data");
         HttpSolrServer solrServer = new HttpSolrServer(baseSolrUrl);
         create.process(solrServer);
         log.info("Created core with name: " + coreName);
-        return new HttpSolrServer(baseSolrUrl + "/" + coreName);
+        return returnServer;
     }
+    /**
+     * Retrieves a list of all the multi valued fields in the solr core
+     * @return all fields tagged as multivalued
+     * @throws SolrServerException When getting the schema information from the SOLR core fails
+     * @throws IOException When connection to the SOLR server fails
+     */
+    public Set<String> getMultivaluedFieldNames() throws SolrServerException, IOException {
+        Set<String> multivaluedFields = new HashSet<String>();
+        LukeRequest lukeRequest = new LukeRequest();
+        lukeRequest.setShowSchema(true);
+        LukeResponse process = lukeRequest.process(solr);
+        Map<String, LukeResponse.FieldInfo> fields = process.getFieldInfo();
+        for(String fieldName : fields.keySet())
+        {
+            LukeResponse.FieldInfo fieldInfo = fields.get(fieldName);
+            EnumSet<FieldFlag> flags = fieldInfo.getFlags();
+            for(FieldFlag fieldFlag : flags)
+            {
+                if(fieldFlag.getAbbreviation() == FieldFlag.MULTI_VALUED.getAbbreviation())
+                {
+                    multivaluedFields.add(fieldName);
+                }
+            }
+        }
+        return multivaluedFields;
+    }
@@ -1527,10 +1568,49 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
     protected void addAdditionalSolrYearCores(SolrQuery solrQuery) {
         //Only add if needed
-        if (0 < statisticYearCores.size()) {
+        initSolrYearCores();
+        if(0 < statisticYearCores.size()){
             //The shards are a comma separated list of the urls to the cores
             solrQuery.add(ShardParams.SHARDS, StringUtils.join(statisticYearCores.iterator(), ","));
         }
     }
+    /*
+     * The statistics shards should not be initialized until all tomcat webapps are fully initialized.
+     * DS-3457 uncovered an issue in DSpace 6x in which this code triggered tomcat to hang when statistics shards are present.
+     * This code is synchonized in the event that 2 threads trigger the initialization at the same time.
+     */
+    protected synchronized void initSolrYearCores() {
+        if (statisticYearCoresInit) {
+            return;
+        }
+        try
+        {
+            //Attempt to retrieve all the statistic year cores
+            File solrDir = new File(configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator);
+            File[] solrCoreFiles = solrDir.listFiles(new FileFilter() {
+                @Override
+                public boolean accept(File file) {
+                    //Core name example: statistics-2008
+                    return file.getName().matches("statistics-\\d\\d\\d\\d");
+                }
+            });
+            //Base url should like : http://localhost:{port.number}/solr
+            String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
+            for (File solrCoreFile : solrCoreFiles) {
+                log.info("Loading core with name: " + solrCoreFile.getName());
+                createCore(solr, solrCoreFile.getName());
+                //Add it to our cores list so we can query it !
+                statisticYearCores.add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName());
+            }
+            //Also add the core containing the current year !
+            statisticYearCores.add(solr.getBaseURL().replace("http://", "").replace("https://", ""));
+        } catch (Exception e) {
+            log.error(e.getMessage(), e);
+        }
+        statisticYearCoresInit = true;
+    }
 }
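
The net effect of initSolrYearCores() plus addAdditionalSolrYearCores() is a sharded query that fans out over every statistics-YYYY core plus the current one. A sketch of the query that results (host and core names hypothetical):

    import java.util.Arrays;
    import java.util.List;
    import org.apache.commons.lang.StringUtils;
    import org.apache.solr.client.solrj.SolrQuery;
    import org.apache.solr.common.params.ShardParams;

    class ShardQuerySketch {
        public static void main(String[] args) {
            List<String> cores = Arrays.asList(           // scheme stripped, as above
                    "localhost:8080/solr/statistics-2015",
                    "localhost:8080/solr/statistics-2016",
                    "localhost:8080/solr/statistics");    // current-year core, added last
            SolrQuery query = new SolrQuery("*:*");
            query.add(ShardParams.SHARDS, StringUtils.join(cores.iterator(), ","));
            // One request now queries all year shards and merges the results.
            System.out.println(query);
        }
    }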


@@ -29,7 +29,6 @@ import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.*;
 import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.BitstreamFormatService;
-import org.dspace.content.service.BitstreamService;
 import org.dspace.core.Context;
 import org.dspace.curate.Curator;
 import org.dspace.submit.AbstractProcessingStep;
@@ -47,7 +46,6 @@ import org.dspace.submit.AbstractProcessingStep;
  * @see org.dspace.submit.AbstractProcessingStep
  *
  * @author Tim Donohue
- * @version $Revision$
  */
 public class UploadStep extends AbstractProcessingStep
 {
@@ -95,7 +93,7 @@ public class UploadStep extends AbstractProcessingStep
     public static final int STATUS_EDIT_COMPLETE = 25;
     /** log4j logger */
-    private static Logger log = Logger.getLogger(UploadStep.class);
+    private static final Logger log = Logger.getLogger(UploadStep.class);
     /** is the upload required? */
     protected boolean fileRequired = configurationService.getBooleanProperty("webui.submit.upload.required", true);
@@ -614,7 +612,7 @@ public class UploadStep extends AbstractProcessingStep
         if (configurationService.getBooleanProperty("submission-curation.virus-scan"))
         {
             Curator curator = new Curator();
-            curator.addTask("vscan").curate(item);
+            curator.addTask("vscan").curate(context, item);
             int status = curator.getStatus("vscan");
             if (status == Curator.CURATE_ERROR)
             {
@@ -652,24 +650,30 @@ public class UploadStep extends AbstractProcessingStep
     }
     /*
-        If we created a new Bitstream but now realised there is a problem then remove it.
+     * If we created a new Bitstream but now realise there is a problem then remove it.
      */
-    protected void backoutBitstream(Context context, SubmissionInfo subInfo, Bitstream b, Item item) throws SQLException, AuthorizeException, IOException
+    protected void backoutBitstream(Context context, SubmissionInfo subInfo, Bitstream b, Item item)
+            throws SQLException, AuthorizeException, IOException
     {
         // remove bitstream from bundle..
-        // delete bundle if it's now empty
         List<Bundle> bundles = b.getBundles();
+        if (bundles.isEmpty())
+            throw new SQLException("Bitstream is not in any Bundles.");
-        bundleService.removeBitstream(context, bundles.get(0), b);
-        List<Bitstream> bitstreams = bundles.get(0).getBitstreams();
+        Bundle firstBundle = bundles.get(0);
+        bundleService.removeBitstream(context, firstBundle, b);
+        List<Bitstream> bitstreams = firstBundle.getBitstreams();
         // remove bundle if it's now empty
-        if (bitstreams.size() < 1)
+        if (bitstreams.isEmpty())
         {
-            itemService.removeBundle(context, item, bundles.get(0));
+            itemService.removeBundle(context, item, firstBundle);
             itemService.update(context, item);
         }
+        else
+            bundleService.update(context, firstBundle);
         subInfo.setBitstream(null);
     }


@@ -45,6 +45,7 @@ public class SolrImportExport
     private static final ThreadLocal<DateFormat> SOLR_DATE_FORMAT;
     private static final ThreadLocal<DateFormat> SOLR_DATE_FORMAT_NO_MS;
     private static final ThreadLocal<DateFormat> EXPORT_DATE_FORMAT;
+    private static final String EXPORT_SEP = "_export_";
     static
     {
@@ -74,6 +75,7 @@ public class SolrImportExport
     private static final String ACTION_OPTION = "a";
     private static final String CLEAR_OPTION = "c";
+    private static final String OVERWRITE_OPTION = "f";
     private static final String DIRECTORY_OPTION = "d";
     private static final String HELP_OPTION = "h";
     private static final String INDEX_NAME_OPTION = "i";
@@ -82,6 +84,8 @@ public class SolrImportExport
     public static final int ROWS_PER_FILE = 10_000;
+    private static final String MULTIPLE_VALUES_SPLITTER = ",";
     private static final Logger log = Logger.getLogger(SolrImportExport.class);
     /**
@@ -102,12 +106,15 @@ public class SolrImportExport
             printHelpAndExit(options, 0);
         }
-        if (!line.hasOption(INDEX_NAME_OPTION))
+        String[] indexNames = {"statistics"};
+        if (line.hasOption(INDEX_NAME_OPTION))
         {
-            System.err.println("This command requires the index-name option but none was present.");
-            printHelpAndExit(options, 1);
+            indexNames = line.getOptionValues(INDEX_NAME_OPTION);
+        }
+        else
+        {
+            System.err.println("No index name provided, defaulting to \"statistics\".");
         }
-        String[] indexNames = line.getOptionValues(INDEX_NAME_OPTION);
         String directoryName = makeDirectoryName(line.getOptionValue(DIRECTORY_OPTION));
@@ -128,7 +135,7 @@ public class SolrImportExport
             {
                 String solrUrl = makeSolrUrl(indexName);
                 boolean clear = line.hasOption(CLEAR_OPTION);
-                importIndex(indexName, importDir, solrUrl, clear, clear);
+                importIndex(indexName, importDir, solrUrl, clear);
             }
             catch (IOException | SolrServerException | SolrImportExportException e)
             {
@@ -166,7 +173,7 @@ public class SolrImportExport
             {
                 String solrUrl = makeSolrUrl(indexName);
                 String timeField = makeTimeField(indexName);
-                exportIndex(indexName, exportDir, solrUrl, timeField, lastValue);
+                exportIndex(indexName, exportDir, solrUrl, timeField, lastValue, line.hasOption(OVERWRITE_OPTION));
             }
             catch (SolrServerException | IOException | SolrImportExportException e)
             {
@@ -181,7 +188,8 @@ public class SolrImportExport
             {
                 try {
                     boolean keepExport = line.hasOption(KEEP_OPTION);
-                    reindex(indexName, directoryName, keepExport);
+                    boolean overwrite = line.hasOption(OVERWRITE_OPTION);
+                    reindex(indexName, directoryName, keepExport, overwrite);
                 } catch (IOException | SolrServerException | SolrImportExportException e) {
                     e.printStackTrace();
                 }
@@ -202,22 +210,18 @@ public class SolrImportExport
     private static Options makeOptions() {
         Options options = new Options();
-        options.addOption(ACTION_OPTION, "action", true,
-                "The action to perform: import, export or reindex. Default: export.");
-        options.addOption(CLEAR_OPTION, "clear", false,
-                "When importing, also clear the index first. Ignored when action is export or reindex.");
+        options.addOption(ACTION_OPTION, "action", true, "The action to perform: import, export or reindex. Default: export.");
+        options.addOption(CLEAR_OPTION, "clear", false, "When importing, also clear the index first. Ignored when action is export or reindex.");
+        options.addOption(OVERWRITE_OPTION, "force-overwrite", false, "When exporting or re-indexing, allow overwrite of existing export files");
         options.addOption(DIRECTORY_OPTION, "directory", true,
                 "The absolute path for the directory to use for import or export. If omitted, [dspace]/solr-export is used.");
-        options.addOption(HELP_OPTION, "help", false,
-                "Get help on options for this command.");
+        options.addOption(HELP_OPTION, "help", false, "Get help on options for this command.");
         options.addOption(INDEX_NAME_OPTION, "index-name", true,
                 "The names of the indexes to process. At least one is required. Available indexes are: authority, statistics.");
-        options.addOption(KEEP_OPTION, "keep", false,
-                "When reindexing, keep the contents of the data export directory." +
+        options.addOption(KEEP_OPTION, "keep", false, "When reindexing, keep the contents of the data export directory." +
                 " By default, the contents of this directory will be deleted once the reindex has finished." +
                 " Ignored when action is export or import.");
-        options.addOption(LAST_OPTION, "last", true,
-                "When exporting, export records from the last [timeperiod] only." +
+        options.addOption(LAST_OPTION, "last", true, "When exporting, export records from the last [timeperiod] only." +
                 " This can be one of: 'd' (beginning of yesterday through to now);" +
                 " 'm' (beginning of the previous month through to end of the previous month);" +
                 " a number, in which case the last [number] of days are exported, through to now (use 0 for today's data)." +
@@ -232,8 +236,9 @@ public class SolrImportExport
     * @param exportDirName the name of the directory to use for export. If this directory doesn't exist, it will be created.
     * @param keepExport whether to keep the contents of the exportDir after the reindex. If keepExport is false and the
     *                   export directory was created by this method, the export directory will be deleted at the end of the reimport.
+    * @param overwrite allow export files to be overwritten during re-index
     */
-    private static void reindex(String indexName, String exportDirName, boolean keepExport)
+    private static void reindex(String indexName, String exportDirName, boolean keepExport, boolean overwrite)
             throws IOException, SolrServerException, SolrImportExportException {
         String tempIndexName = indexName + "-temp";
@@ -241,7 +246,10 @@ public class SolrImportExport
         String baseSolrUrl = StringUtils.substringBeforeLast(origSolrUrl, "/"); // need to get non-core solr URL
         String tempSolrUrl = baseSolrUrl + "/" + tempIndexName;
-        String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + indexName;
+        //The configuration details for the statistics shards reside within the "statistics" folder
+        String instanceIndexName = indexName.startsWith("statistics-") ? "statistics" : indexName;
+        String solrInstanceDir = ConfigurationManager.getProperty("dspace.dir") + File.separator + "solr" + File.separator + instanceIndexName;
         // the [dspace]/solr/[indexName]/conf directory needs to be available on the local machine for this to work
         // -- we need access to the schema.xml and solrconfig.xml file, plus files referenced from there
         // if this directory can't be found, output an error message and skip this index
@@ -327,10 +335,10 @@ public class SolrImportExport
         try
         {
             // export from the actual core (from temp core name, actual data dir)
-            exportIndex(indexName, exportDir, tempSolrUrl, timeField);
+            exportIndex(indexName, exportDir, tempSolrUrl, timeField, overwrite);
             // clear actual core (temp core name, clearing actual data dir) & import
-            importIndex(indexName, exportDir, tempSolrUrl, true, true);
+            importIndex(indexName, exportDir, tempSolrUrl, true);
         }
         catch (Exception e)
         {
@@ -352,9 +360,9 @@ public class SolrImportExport
         // export all docs from now-temp core into export directory -- this won't cause name collisions with the actual export
         // because the core name for the temporary export has -temp in it while the actual core doesn't
-        exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField);
+        exportIndex(tempIndexName, exportDir, tempSolrUrl, timeField, overwrite);
         // ...and import them into the now-again-actual core *without* clearing
-        importIndex(tempIndexName, exportDir, origSolrUrl, false, true);
+        importIndex(tempIndexName, exportDir, origSolrUrl, false);
         // commit changes
         origSolr.commit();
@@ -386,13 +394,14 @@ public class SolrImportExport
     * @param toDir The target directory for the export. Will be created if it doesn't exist yet. The directory must be writeable.
     * @param solrUrl The solr URL for the index to export. Must not be null.
     * @param timeField The time field to use for sorting the export. Must not be null.
+    * @param overwrite If set, allow export files to be overwritten
     * @throws SolrServerException if there is a problem with exporting the index.
     * @throws IOException if there is a problem creating the files or communicating with Solr.
     * @throws SolrImportExportException if there is a problem in communicating with Solr.
     */
-    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField)
+    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, boolean overwrite)
             throws SolrServerException, SolrImportExportException, IOException {
-        exportIndex(indexName, toDir, solrUrl, timeField, null);
+        exportIndex(indexName, toDir, solrUrl, timeField, null, overwrite);
     }
     /**
@@ -408,7 +417,7 @@ public class SolrImportExport
     * @throws SolrServerException if there is a problem reading the files or communicating with Solr.
     * @throws SolrImportExportException if there is a problem communicating with Solr.
     */
-    public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear, boolean overwrite)
+    public static void importIndex(final String indexName, File fromDir, String solrUrl, boolean clear)
             throws IOException, SolrServerException, SolrImportExportException
     {
         if (StringUtils.isBlank(solrUrl))
@@ -438,7 +447,7 @@ public class SolrImportExport
             @Override
             public boolean accept(File dir, String name)
             {
-                return name.startsWith(indexName) && name.endsWith(".csv");
+                return name.startsWith(indexName + EXPORT_SEP) && name.endsWith(".csv");
             }
         });
@@ -454,13 +463,10 @@ public class SolrImportExport
         {
             log.info("Importing file " + file.getCanonicalPath());
             ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
-            if (overwrite)
-            {
-                contentStreamUpdateRequest.setParam("skip", "_version_");
-            }
+            contentStreamUpdateRequest.setParam("skip", "_version_");
             for (String mvField : multivaluedFields) {
                 contentStreamUpdateRequest.setParam("f." + mvField + ".split", "true");
-                contentStreamUpdateRequest.setParam("f." + mvField + ".escape", "\\");
+                contentStreamUpdateRequest.setParam("f." + mvField + ".separator", MULTIPLE_VALUES_SPLITTER);
             }
             contentStreamUpdateRequest.setParam("stream.contentType", "text/csv;charset=utf-8");
             contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
@@ -525,13 +531,15 @@ public class SolrImportExport
     * @param solrUrl The solr URL for the index to export. Must not be null.
     * @param timeField The time field to use for sorting the export. Must not be null.
     * @param fromWhen Optionally, from when to export. See options for allowed values. If null or empty, all documents will be exported.
+    * @param overwrite If set, allow export files to be overwritten
     * @throws SolrServerException if there is a problem with exporting the index.
     * @throws IOException if there is a problem creating the files or communicating with Solr.
     * @throws SolrImportExportException if there is a problem in communicating with Solr.
     */
-    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen)
+    public static void exportIndex(String indexName, File toDir, String solrUrl, String timeField, String fromWhen, boolean overwrite)
             throws SolrServerException, IOException, SolrImportExportException
     {
+        log.info(String.format("Export Index [%s] to [%s] using [%s] Time Field[%s] FromWhen[%s]", indexName, toDir, solrUrl, timeField, fromWhen));
         if (StringUtils.isBlank(solrUrl))
         {
             throw new SolrImportExportException("Could not construct solr URL for index" + indexName + ", aborting export.");
@@ -560,12 +568,14 @@ public class SolrImportExport
         query.setGetFieldStatistics(timeField);
         Map<String, FieldStatsInfo> fieldInfo = solr.query(query).getFieldStatsInfo();
         if (fieldInfo == null || !fieldInfo.containsKey(timeField)) {
-            log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
+            log.warn(String.format("Queried [%s]. No fieldInfo found while exporting index [%s] time field [%s] from [%s]. Export cancelled.",
+                    solrUrl, indexName, timeField, fromWhen));
             return;
         }
         FieldStatsInfo timeFieldInfo = fieldInfo.get(timeField);
         if (timeFieldInfo == null || timeFieldInfo.getMin() == null) {
-            log.warn("Cannot get earliest date, not exporting index " + indexName + ", time field " + timeField + ", from " + fromWhen);
+            log.warn(String.format("Queried [%s]. No earliest date found while exporting index [%s] time field [%s] from [%s]. Export cancelled.",
+                    solrUrl, indexName, timeField, fromWhen));
             return;
         }
         Date earliestTimestamp = (Date) timeFieldInfo.getMin();
@@ -599,6 +609,7 @@ public class SolrImportExport
             monthQuery.setRows(ROWS_PER_FILE);
             monthQuery.set("wt", "csv");
             monthQuery.set("fl", "*");
+            monthQuery.setParam("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);
             monthQuery.addFilterQuery(timeField + ":[" +monthStart + " TO " + monthStart + "+1MONTH]");
@@ -608,17 +619,23 @@ public class SolrImportExport
             URL url = new URL(solrUrl + "/select?" + monthQuery.toString());
             File file = new File(toDir.getCanonicalPath(), makeExportFilename(indexName, monthStartDate, docsThisMonth, i));
-            if (file.createNewFile())
+            if (file.createNewFile() || overwrite)
             {
                 FileUtils.copyURLToFile(url, file);
-                log.info("Exported batch " + i + " to " + file.getCanonicalPath());
+                String message = String.format("Solr export to file [%s] complete. Export for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
+                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
+                log.info(message);
+            }
+            else if (file.exists())
+            {
+                String message = String.format("Solr export file [%s] already exists. Export failed for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
+                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
+                throw new SolrImportExportException(message);
             }
             else
             {
-                throw new SolrImportExportException("Could not create file " + file.getCanonicalPath()
-                        + " while exporting index " + indexName
-                        + ", month" + monthStart
-                        + ", batch " + i);
+                String message = String.format("Cannot create solr export file [%s]. Export failed for Index [%s] Month [%s] Batch [%d] Num Docs [%d]",
+                        file.getCanonicalPath(), indexName, monthStart, i, docsThisMonth);
+                throw new SolrImportExportException(message);
             }
         }
     }
@@ -681,7 +698,7 @@ public class SolrImportExport
             exportFileNumber = StringUtils.leftPad("" + (index / ROWS_PER_FILE), (int) Math.ceil(Math.log10(totalRecords / ROWS_PER_FILE)), "0");
         }
         return indexName
-                + "_export_"
+                + EXPORT_SEP
                + EXPORT_DATE_FORMAT.get().format(exportStart)
                + (StringUtils.isNotBlank(exportFileNumber) ? "_" + exportFileNumber : "")
                + ".csv";
@@ -738,6 +755,10 @@ public class SolrImportExport
     {
         HelpFormatter myhelp = new HelpFormatter();
         myhelp.printHelp(SolrImportExport.class.getSimpleName() + "\n", options);
+        System.out.println("\n\nCommand Defaults");
+        System.out.println("\tsolr-export-statistics [-a export] [-i statistics]");
+        System.out.println("\tsolr-import-statistics [-a import] [-i statistics]");
+        System.out.println("\tsolr-reindex-statistics [-a reindex] [-i statistics]");
         System.exit(exitCode);
     }
 }
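
The multivalue handling above is a matched pair: the export joins multi-valued fields with MULTIPLE_VALUES_SPLITTER (",") via csv.mv.separator, and the import must declare the same separator per field so Solr splits the column back into separate values. A sketch of the import side in isolation (field name and file are hypothetical):

    import java.io.File;
    import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
    import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;

    class CsvImportSketch {
        static ContentStreamUpdateRequest buildImport(File csv, String mvField) {
            ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/csv");
            req.setParam("stream.contentType", "text/csv;charset=utf-8");
            req.setParam("skip", "_version_");                // never reimport Solr's version stamp
            req.setParam("f." + mvField + ".split", "true");  // column holds multiple values
            req.setParam("f." + mvField + ".separator", ","); // must match csv.mv.separator at export
            req.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
            req.addFile(csv, "text/csv;charset=utf-8");
            return req;
        }
    }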


@@ -0,0 +1,17 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
---------------------------------------------------------------
-- DS-3410
---------------------------------------------------------------
-- This script will create lost indexes
---------------------------------------------------------------
CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object);
CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id);
CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id);


@@ -0,0 +1,16 @@
--
-- The contents of this file are subject to the license and copyright
-- detailed in the LICENSE and NOTICE files at the root of the source
-- tree and available online at
--
-- http://www.dspace.org/license/
--
------------------------------------------------------
-- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql
------------------------------------------------------
UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4;
UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3;
UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2;


@@ -19,6 +19,10 @@ import org.dspace.servicemanager.DSpaceKernelInit;
 import org.junit.AfterClass;
 import static org.junit.Assert.fail;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
+import org.junit.runner.RunWith;
+import mockit.integration.junit4.JMockit;
 /**
  * DSpace Unit Tests need to initialize the DSpace Kernel / Service Mgr
@@ -33,6 +37,8 @@ import org.junit.BeforeClass;
  * @see AbstractIntegrationTest
  * @author Tim
  */
+@Ignore
+@RunWith(JMockit.class)
 public class AbstractDSpaceTest
 {
     /** log4j category */


@@ -8,6 +8,7 @@
 package org.dspace;
 import org.databene.contiperf.junit.ContiPerfRule;
+import org.junit.Ignore;
 import org.junit.Rule;
 /**
@@ -20,6 +21,7 @@ import org.junit.Rule;
  *
  * @author pvillega
  */
+@Ignore
 public class AbstractIntegrationTest extends AbstractUnitTest
 {


@@ -21,6 +21,7 @@ import org.dspace.storage.rdbms.DatabaseUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.BeforeClass;
+import org.junit.Ignore;
 import java.sql.SQLException;
@@ -38,6 +39,7 @@ import static org.junit.Assert.fail;
 * @see AbstractDSpaceTest
 * @author pvillega
 */
+@Ignore
 public class AbstractUnitTest extends AbstractDSpaceTest
 {
     /** log4j category */

View File

@@ -67,7 +67,7 @@ public class RestIndex {
 "<ul>" +
 "<li>GET / - Return this page.</li>" +
 "<li>GET /test - Return the string \"REST api is running\" for testing purposes.</li>" +
-"<li>POST /login - Method for logging into the DSpace RESTful API. You must post User class. Example: {\"email\":\"test@dspace\",\"password\":\"pass\"}. Returns a token which must be included in future requests in the \"rest-dspace-token\" header.</li>" +
+"<li>POST /login - Method for logging into the DSpace RESTful API. You must post the parameters \"email\" and \"password\". Example: \"email=test@dspace&password=pass\". Returns a JSESSIONID cookie which can be used for future authenticated requests.</li>" +
 "<li>POST /logout - Method for logging out of the DSpace RESTful API. The request must include the \"rest-dspace-token\" token</li> header." +
 "</ul>" +
 "<h2>Communities</h2>" +

View File

@@ -103,7 +103,7 @@ public final class SpringServiceManager implements ServiceManagerSystem {
 bean = (T) applicationContext.getBean(name, type);
 } catch (BeansException e) {
 // no luck, try the fall back option
-log.info("Unable to locate bean by name or id=" + name + ". Will try to look up bean by type next.");
+log.info("Unable to locate bean by name or id=" + name + ". Will try to look up bean by type next. BeansException: " + e.getMessage());
 bean = null;
 }
 } else {
@@ -112,7 +112,7 @@ public final class SpringServiceManager implements ServiceManagerSystem {
 bean = (T) applicationContext.getBean(type.getName(), type);
 } catch (BeansException e) {
 // no luck, try the fall back option
-log.info("Unable to locate bean by name or id=" + type.getName() + ". Will try to look up bean by type next.");
+log.info("Unable to locate bean by name or id=" + type.getName() + ". Will try to look up bean by type next. BeansException: " + e.getMessage());
 bean = null;
 }
 }
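The change only enriches the log message with the BeansException detail; the lookup strategy itself is unchanged: try the bean by name first, then fall back to a by-type lookup. The pattern in isolation — a sketch against plain Spring, not the DSpace class itself:

    import org.springframework.beans.BeansException;
    import org.springframework.context.ApplicationContext;

    public class BeanLookup {
        static <T> T lookup(ApplicationContext ctx, String name, Class<T> type) {
            try {
                return ctx.getBean(name, type);
            } catch (BeansException e) {
                // record why the name lookup failed, as the enriched log now does
                System.out.println("Name lookup failed: " + e.getMessage()
                        + "; trying by type");
                return ctx.getBean(type); // throws if no unique bean of this type
            }
        }
    }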

View File

@@ -96,7 +96,7 @@ public class SwordMETSContentIngester extends AbstractSwordContentIngester
 File depositFile = deposit.getFile();
 // load the plugin manager for the required configuration
-String cfg = ConfigurationManager.getProperty("sword-server",
+String cfg = ConfigurationManager.getProperty("swordv2-server",
 "mets-ingester.package-ingester");
 if (cfg == null || "".equals(cfg))
 {
@@ -117,7 +117,7 @@ public class SwordMETSContentIngester extends AbstractSwordContentIngester
 // Should restore mode be enabled, i.e. keep existing handle?
 if (ConfigurationManager.getBooleanProperty(
-"sword-server", "restore-mode.enable", false))
+"swordv2-server", "restore-mode.enable", false))
 {
 params.setRestoreModeEnabled(true);
 }
@@ -226,7 +226,7 @@ public class SwordMETSContentIngester extends AbstractSwordContentIngester
 // load the plugin manager for the required configuration
 String cfg = ConfigurationManager.getProperty(
-"sword-server", "mets-ingester.package-ingester");
+"swordv2-server", "mets-ingester.package-ingester");
 if (cfg == null || "".equals(cfg))
 {
 cfg = "METS"; // default to METS
@@ -246,7 +246,7 @@ public class SwordMETSContentIngester extends AbstractSwordContentIngester
 // Should restore mode be enabled, i.e. keep existing handle?
 if (ConfigurationManager.getBooleanProperty(
-"sword-server", "restore-mode.enable", false))
+"swordv2-server", "restore-mode.enable", false))
 {
 params.setRestoreModeEnabled(true);
 }
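The fix makes the SWORDv2 ingester read its settings from the swordv2-server configuration module rather than the sword-server module of the original SWORD implementation. A sketch of the corrected lookup, mirroring the diff (the "METS" fallback is the one the surrounding ingester code applies when the property is unset):

    import org.dspace.core.ConfigurationManager;

    public class SwordCfgDemo {
        public static void main(String[] args) {
            String cfg = ConfigurationManager.getProperty("swordv2-server",
                    "mets-ingester.package-ingester");
            if (cfg == null || "".equals(cfg)) {
                cfg = "METS"; // default to METS, as in the ingester itself
            }
            boolean restore = ConfigurationManager.getBooleanProperty(
                    "swordv2-server", "restore-mode.enable", false);
            System.out.println("ingester=" + cfg + ", restore-mode=" + restore);
        }
    }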

View File

@@ -45,6 +45,9 @@ dspace.url = ${dspace.baseUrl}/${dspace.ui}
 # Name of the site
 dspace.name = DSpace at My University
+# assetstore.dir, look at DSPACE/config/spring/api/bitstore.xml for more options
+assetstore.dir = ${dspace.dir}/assetstore
 # Default language for metadata values
 default.language = en_US

View File

@@ -222,8 +222,6 @@
 <class>org.dspace.util.SolrImportExport</class>
 <argument>-a</argument>
 <argument>export</argument>
-<argument>-i</argument>
-<argument>statistics</argument>
 </step>
 </command>
 <command>
@@ -233,8 +231,6 @@
 <class>org.dspace.util.SolrImportExport</class>
 <argument>-a</argument>
 <argument>import</argument>
-<argument>-i</argument>
-<argument>statistics</argument>
 </step>
 </command>
 <command>
@@ -244,8 +240,6 @@
 <class>org.dspace.util.SolrImportExport</class>
 <argument>-a</argument>
 <argument>reindex</argument>
-<argument>-i</argument>
-<argument>statistics</argument>
 </step>
 </command>
 <command>

View File

@@ -49,6 +49,10 @@ dspace.ui = xmlui
 # Name of the site
 dspace.name = DSpace at My University
+# assetstore.dir, look at DSPACE/config/spring/api/bitstore.xml for more options
+# default is ${dspace.dir}/assetstore, uncomment and modify if you need to use a different path
+#assetstore.dir = ${dspace.dir}/assetstore
 # Default language for metadata values
 #default.language = en_US

View File

@@ -14,7 +14,7 @@
 </bean>
 <bean name="localStore" class="org.dspace.storage.bitstore.DSBitStoreService" scope="singleton">
-<property name="baseDir" value="${dspace.dir}/assetstore"/>
+<property name="baseDir" value="${assetstore.dir}"/>
 </bean>
 <bean name="s3Store" class="org.dspace.storage.bitstore.S3BitStoreService" scope="singleton">
@@ -36,4 +36,4 @@
 <!-- <bean name="localStore2 ... -->
 <!-- <bean name="s3Store2 ... -->
 </beans>