[DS-1243] @mire solr statistics contribution

KevinVdV committed 2012-09-25 15:58:23 +02:00
parent ab37bd4a51
commit 808bc6fc5d
52 changed files with 5796 additions and 2994 deletions

View File

@@ -103,7 +103,8 @@ public class DSIndexer
private static int batchFlushAfterDocuments = ConfigurationManager.getIntProperty("search.batch.documents", 20);
private static boolean batchProcessingMode = false;
+ static final Version luceneVersion = Version.LUCENE_35;
// Class to hold the index configuration (one instance per config line)
private static class IndexConfig
{
@@ -637,7 +638,7 @@ public class DSIndexer
Class analyzerClass = Class.forName(analyzerClassName);
Constructor constructor = analyzerClass.getDeclaredConstructor(Version.class);
constructor.setAccessible(true);
- analyzer = (Analyzer) constructor.newInstance(Version.LUCENE_33);
+ analyzer = (Analyzer) constructor.newInstance(luceneVersion);
}
catch (Exception e)
{
@@ -914,7 +915,7 @@ public class DSIndexer
throws IOException
{
Directory dir = FSDirectory.open(new File(indexDirectory));
- IndexWriterConfig iwc = new IndexWriterConfig(Version.LUCENE_33, getAnalyzer());
+ IndexWriterConfig iwc = new IndexWriterConfig(luceneVersion, getAnalyzer());
if(wipeExisting){
iwc.setOpenMode(IndexWriterConfig.OpenMode.CREATE);
}else{

View File

@@ -122,7 +122,7 @@ public class DSQuery
// grab a searcher, and do the search
IndexSearcher searcher = getSearcher(c);
- QueryParser qp = new QueryParser(Version.LUCENE_33, "default", DSIndexer.getAnalyzer());
+ QueryParser qp = new QueryParser(DSIndexer.luceneVersion, "default", DSIndexer.getAnalyzer());
log.debug("Final query string: " + querystring);
if (operator == null || operator.equals("OR"))

View File

@@ -28,7 +28,9 @@ public class LoggerUsageEventListener extends AbstractUsageEventListener{
public void receiveEvent(Event event) {
- if(event instanceof UsageEvent)
+ //Search events are already logged
+ //UsageSearchEvent is already logged in the search classes, no need to repeat this logging
+ if(event instanceof UsageEvent && !(event instanceof UsageSearchEvent))
{
UsageEvent ue = (UsageEvent)event;

View File

@@ -30,6 +30,7 @@ public class UsageEvent extends Event {
REMOVE ("remove"),
BROWSE ("browse"),
SEARCH ("search"),
+ WORKFLOW ("workflow"),
LOGIN ("login"),
SUBSCRIBE ("subscribe"),
UNSUBSCRIBE ("unsubscribe"),
@@ -59,12 +60,13 @@ public class UsageEvent extends Event {
private static String checkParams(Action action, HttpServletRequest request, Context context, DSpaceObject object)
{
+ StringBuilder eventName = new StringBuilder();
if(action == null)
{
throw new IllegalStateException("action cannot be null");
}
- if(request == null)
+ if(action != Action.WORKFLOW && request == null)
{
throw new IllegalStateException("request cannot be null");
}
@@ -75,21 +77,17 @@ public class UsageEvent extends Event {
throw new IllegalStateException("context cannot be null");
}
- if(object == null)
+ if(action != Action.WORKFLOW && action != Action.SEARCH && object == null)
{
throw new IllegalStateException("object cannot be null");
+ }else
+ if(object != null){
+ String objText = Constants.typeText[object.getType()].toLowerCase();
+ eventName.append(objText).append(":");
}
+ eventName.append(action.text());
- try
- {
- String objText = Constants.typeText[object.getType()].toLowerCase();
- return objText + ":" + action.text();
- }catch(Exception e)
- {
- }
- return "";
+ return eventName.toString();
}
public UsageEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object)
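In effect, the relaxed checks above let WORKFLOW events be fired without an HttpServletRequest and WORKFLOW/SEARCH events without a target DSpaceObject; when no object is given the event name degrades to the bare action text. A minimal illustrative sketch of that naming behaviour (this standalone helper is not part of the commit, it only mirrors the logic of the revised checkParams):

package org.dspace.usage;

import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;

// Illustrative helper only: produces "item:view", "collection:view", ...
// when an object is present, and just "search" or "workflow" when it is not.
public final class UsageEventNamingSketch {
    static String buildEventName(UsageEvent.Action action, DSpaceObject object) {
        StringBuilder eventName = new StringBuilder();
        if (object != null) {
            eventName.append(Constants.typeText[object.getType()].toLowerCase()).append(":");
        }
        eventName.append(action.text());
        return eventName.toString();
    }
}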

View File

@@ -0,0 +1,86 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.usage;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
/**
* Extends the standard usage event to contain search information
* search information includes the query(s) used & the scope
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class UsageSearchEvent extends UsageEvent{
private List<String> queries;
private DSpaceObject scope;
/** Optional search parameters **/
private int rpp;
private String sortBy;
private String sortOrder;
private int page;
public UsageSearchEvent(Action action, HttpServletRequest request, Context context, DSpaceObject object, List<String> queries, DSpaceObject scope) {
super(action, request, context, object);
this.queries = queries;
this.scope = scope;
this.rpp = -1;
this.sortBy = null;
this.sortOrder = null;
this.page = -1;
}
public List<String> getQueries() {
return queries;
}
public DSpaceObject getScope() {
return scope;
}
public int getRpp() {
return rpp;
}
public void setRpp(int rpp) {
this.rpp = rpp;
}
public String getSortBy() {
return sortBy;
}
public void setSortBy(String sortBy) {
this.sortBy = sortBy;
}
public String getSortOrder() {
return sortOrder;
}
public void setSortOrder(String sortOrder) {
this.sortOrder = sortOrder;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
}
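A caller fires one of these events through the DSpace EventService; the SearchResultLogServlet added later in this commit does exactly this. A condensed sketch, assuming request, context, clickedResult and scope are supplied by the caller (the query string is a placeholder):

import java.util.Arrays;
import org.dspace.usage.UsageEvent;
import org.dspace.usage.UsageSearchEvent;
import org.dspace.utils.DSpace;

// Sketch: log a search hit so SolrLoggerUsageEventListener can index it.
UsageSearchEvent searchEvent = new UsageSearchEvent(
        UsageEvent.Action.SEARCH, request, context,
        clickedResult,                      // DSpaceObject the user clicked, may be null
        Arrays.asList("dark matter"),       // placeholder query list
        scope);                             // community/collection scope, may be null
searchEvent.setRpp(10);
searchEvent.setPage(1);
new DSpace().getEventService().fireEvent(searchEvent);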

View File

@@ -0,0 +1,80 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.usage;
import org.dspace.content.Collection;
import org.dspace.content.InProgressSubmission;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
/**
* Extends the standard usage event to contain workflow information
*
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class UsageWorkflowEvent extends UsageEvent {
private String workflowStep;
private String oldState;
private EPerson[] epersonOwners;
private Group[] groupOwners;
private Collection scope;
private EPerson actor;
private InProgressSubmission workflowItem;
public UsageWorkflowEvent(Context context, Item item, InProgressSubmission workflowItem, String workflowStep, String oldState, Collection scope, EPerson actor) {
super(Action.WORKFLOW, null, context, item);
this.workflowItem = workflowItem;
this.workflowStep = workflowStep;
this.oldState = oldState;
this.scope = scope;
this.actor = actor;
}
public String getWorkflowStep() {
return workflowStep;
}
public String getOldState() {
return oldState;
}
public Collection getScope() {
return scope;
}
public EPerson[] getEpersonOwners() {
return epersonOwners;
}
public void setEpersonOwners(EPerson... epersonOwners) {
this.epersonOwners = epersonOwners;
}
public Group[] getGroupOwners() {
return groupOwners;
}
public void setGroupOwners(Group... newGroupOwner) {
this.groupOwners = newGroupOwner;
}
public EPerson getActor() {
return actor;
}
public InProgressSubmission getWorkflowItem() {
return workflowItem;
}
}
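Both workflow managers touched by this commit assemble and fire these events the same way; a condensed sketch of that pattern, assuming context, item, workflowItem, collection, actor, newOwner and newOwnerGroup are available in the calling code (the state strings are placeholders):

import org.dspace.usage.UsageWorkflowEvent;
import org.dspace.utils.DSpace;

// Sketch: how the logWorkflowEvent helpers below build and fire the event.
UsageWorkflowEvent event = new UsageWorkflowEvent(
        context, item, workflowItem,
        "STEP1POOL",          // new workflow state text (placeholder)
        "SUBMIT",             // previous workflow state text (placeholder)
        collection, actor);
if (newOwner != null) {
    event.setEpersonOwners(newOwner);
}
if (newOwnerGroup != null) {
    event.setGroupOwners(newOwnerGroup);
}
new DSpace().getEventService().fireEvent(event);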

View File

@@ -41,6 +41,8 @@ import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
import org.dspace.storage.rdbms.TableRowIterator;
+ import org.dspace.usage.UsageWorkflowEvent;
+ import org.dspace.utils.DSpace;
/**
* Workflow state machine
@@ -518,6 +520,9 @@ public class WorkflowManager
Group mygroup = null;
boolean archived = false;
+ //Gather our old data for launching the workflow event
+ int oldState = wi.getState();
wi.setState(newstate);
switch (newstate)
@@ -657,6 +662,8 @@ public class WorkflowManager
break;
}
+ logWorkflowEvent(c, wi.getItem(), wi, c.getCurrentUser(), newstate, newowner, mycollection, oldState, mygroup);
if (!archived)
{
wi.update();
@@ -665,6 +672,22 @@ public class WorkflowManager
return archived;
}
+ private static void logWorkflowEvent(Context c, Item item, WorkflowItem workflowItem, EPerson actor, int newstate, EPerson newOwner, Collection mycollection, int oldState, Group newOwnerGroup) {
+ if(newstate == WFSTATE_ARCHIVE || newstate == WFSTATE_STEP1POOL || newstate == WFSTATE_STEP2POOL || newstate == WFSTATE_STEP3POOL){
+ //Clear the newowner variable since this one isn't owned anymore !
+ newOwner = null;
+ }
+ UsageWorkflowEvent usageWorkflowEvent = new UsageWorkflowEvent(c, item, workflowItem, workflowText[newstate], workflowText[oldState], mycollection, actor);
+ if(newOwner != null){
+ usageWorkflowEvent.setEpersonOwners(newOwner);
+ }
+ if(newOwnerGroup != null){
+ usageWorkflowEvent.setGroupOwners(newOwnerGroup);
+ }
+ new DSpace().getEventService().fireEvent(usageWorkflowEvent);
+ }
/**
* Get the text representing the given workflow state
*
@@ -816,6 +839,8 @@ public class WorkflowManager
String rejection_message) throws SQLException, AuthorizeException,
IOException
{
+ int oldState = wi.getState();
// authorize a DSpaceActions.REJECT
// stop workflow
deleteTasks(c, wi);
@@ -848,6 +873,8 @@ public class WorkflowManager
+ "collection_id=" + wi.getCollection().getID() + "eperson_id="
+ e.getID()));
+ logWorkflowEvent(c, wsi.getItem(), wi, e, WFSTATE_SUBMIT, null, wsi.getCollection(), oldState, null);
return wsi;
} }

View File

@@ -19,6 +19,8 @@ import org.dspace.eperson.Group;
import org.dspace.handle.HandleManager;
import org.dspace.storage.rdbms.DatabaseManager;
import org.dspace.storage.rdbms.TableRow;
+ import org.dspace.usage.UsageWorkflowEvent;
+ import org.dspace.utils.DSpace;
import org.dspace.xmlworkflow.state.Step;
import org.dspace.xmlworkflow.state.Workflow;
import org.dspace.xmlworkflow.state.actions.*;
@@ -144,6 +146,9 @@ public class XmlWorkflowManager {
// record the start of the workflow w/provenance message
recordStart(wfi.getItem(), firstActionConfig.getProcessingAction());
+ //Fire an event !
+ logWorkflowEvent(context, firstStep.getWorkflow().getID(), null, null, wfi, null, firstStep, firstActionConfig);
//If we don't have a UI activate it
if(!firstActionConfig.requiresUI()){
ActionResult outcome = firstActionConfig.getProcessingAction().execute(context, wfi, firstStep, null);
@@ -185,55 +190,81 @@ public class XmlWorkflowManager {
return null;
}else
if (currentOutcome.getType() == ActionResult.TYPE.TYPE_OUTCOME) {
- //We have completed our action search & retrieve the next action
+ Step nextStep = null;
WorkflowActionConfig nextActionConfig = null;
- if(currentOutcome.getResult() == ActionResult.OUTCOME_COMPLETE){
- nextActionConfig = currentStep.getNextAction(currentActionConfig);
- }
+ try {
+ //We have completed our action search & retrieve the next action
+ if(currentOutcome.getResult() == ActionResult.OUTCOME_COMPLETE){
+ nextActionConfig = currentStep.getNextAction(currentActionConfig);
+ }
- if (nextActionConfig != null) {
- nextActionConfig.getProcessingAction().activate(c, wfi);
- if (nextActionConfig.requiresUI() && !enteredNewStep) {
- createOwnedTask(c, wfi, currentStep, nextActionConfig, user);
- return nextActionConfig;
- } else if( nextActionConfig.requiresUI() && enteredNewStep){
- //We have entered a new step and have encountered a UI, return null since the current user doesn't have anything to do with this
- c.restoreAuthSystemState();
- return null;
- } else {
- ActionResult newOutcome = nextActionConfig.getProcessingAction().execute(c, wfi, currentStep, null);
- return processOutcome(c, user, workflow, currentStep, nextActionConfig, newOutcome, wfi, enteredNewStep);
- }
- }else
- if(enteredNewStep){
- // If the user finished his/her step, we keep processing until there is a UI step action or no step at all
- Step nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
- c.turnOffAuthorisationSystem();
- return processNextStep(c, user, workflow, currentOutcome, wfi, nextStep);
- } else {
- //
- ClaimedTask task = ClaimedTask.findByWorkflowIdAndEPerson(c, wfi.getID(), user.getID());
- //Check if we have a task for this action (might not be the case with automatic steps)
- //First add it to our list of finished users, since no more actions remain
- WorkflowRequirementsManager.addFinishedUser(c, wfi, user);
- c.turnOffAuthorisationSystem();
- //Check if our requirements have been met
- if((currentStep.isFinished(c, wfi) && currentOutcome.getResult() == ActionResult.OUTCOME_COMPLETE) || currentOutcome.getResult() != ActionResult.OUTCOME_COMPLETE){
- //Delete all the table rows containing the users who performed this task
- WorkflowRequirementsManager.clearInProgressUsers(c, wfi);
- //Remove all the tasks
- XmlWorkflowManager.deleteAllTasks(c, wfi);
- Step nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
- return processNextStep(c, user, workflow, currentOutcome, wfi, nextStep);
- }else{
- //We are done with our actions so go to the submissions page but remove action ClaimedAction first
- deleteClaimedTask(c, wfi, task);
- c.restoreAuthSystemState();
- return null;
- }
- }
+ if (nextActionConfig != null) {
+ //We remain in the current step since an action is found
+ nextStep = currentStep;
+ nextActionConfig.getProcessingAction().activate(c, wfi);
+ if (nextActionConfig.requiresUI() && !enteredNewStep) {
+ createOwnedTask(c, wfi, currentStep, nextActionConfig, user);
+ return nextActionConfig;
+ } else if( nextActionConfig.requiresUI() && enteredNewStep){
+ //We have entered a new step and have encountered a UI, return null since the current user doesn't have anything to do with this
+ c.restoreAuthSystemState();
+ return null;
+ } else {
+ ActionResult newOutcome = nextActionConfig.getProcessingAction().execute(c, wfi, currentStep, null);
+ return processOutcome(c, user, workflow, currentStep, nextActionConfig, newOutcome, wfi, enteredNewStep);
+ }
+ }else
+ if(enteredNewStep){
+ // If the user finished his/her step, we keep processing until there is a UI step action or no step at all
+ nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
+ c.turnOffAuthorisationSystem();
+ nextActionConfig = processNextStep(c, user, workflow, currentOutcome, wfi, nextStep);
+ //If we require a user interface return null so that the user is redirected to the "submissions page"
+ if(nextActionConfig == null || nextActionConfig.requiresUI()){
+ return null;
+ }else{
+ return nextActionConfig;
+ }
+ } else {
+ ClaimedTask task = ClaimedTask.findByWorkflowIdAndEPerson(c, wfi.getID(), user.getID());
+ //Check if we have a task for this action (might not be the case with automatic steps)
+ //First add it to our list of finished users, since no more actions remain
+ WorkflowRequirementsManager.addFinishedUser(c, wfi, user);
+ c.turnOffAuthorisationSystem();
+ //Check if our requirements have been met
+ if((currentStep.isFinished(c, wfi) && currentOutcome.getResult() == ActionResult.OUTCOME_COMPLETE) || currentOutcome.getResult() != ActionResult.OUTCOME_COMPLETE){
+ //Delete all the table rows containing the users who performed this task
+ WorkflowRequirementsManager.clearInProgressUsers(c, wfi);
+ //Remove all the tasks
+ XmlWorkflowManager.deleteAllTasks(c, wfi);
+ nextStep = workflow.getNextStep(c, wfi, currentStep, currentOutcome.getResult());
+ nextActionConfig = processNextStep(c, user, workflow, currentOutcome, wfi, nextStep);
+ //If we require a user interface return null so that the user is redirected to the "submissions page"
+ if(nextActionConfig == null || nextActionConfig.requiresUI()){
+ return null;
+ }else{
+ return nextActionConfig;
+ }
+ }else{
+ //We are done with our actions so go to the submissions page but remove action ClaimedAction first
+ deleteClaimedTask(c, wfi, task);
+ c.restoreAuthSystemState();
+ nextStep = currentStep;
+ nextActionConfig = currentActionConfig;
+ return null;
+ }
+ }
+ }catch (Exception e){
+ log.error("error while processing workflow outcome", e);
+ e.printStackTrace();
+ }
+ finally {
+ if((nextStep != null && nextActionConfig != null) || wfi.getItem().isArchived()){
+ logWorkflowEvent(c, currentStep.getWorkflow().getID(), currentStep.getId(), currentActionConfig.getId(), wfi, user, nextStep, nextActionConfig);
+ }
+ }
}
@@ -243,6 +274,51 @@ public class XmlWorkflowManager {
throw new WorkflowException("Invalid step outcome");
}
+ protected static void logWorkflowEvent(Context c, String workflowId, String previousStepId, String previousActionConfigId, XmlWorkflowItem wfi, EPerson actor, Step newStep, WorkflowActionConfig newActionConfig) throws SQLException {
+ try {
+ //Fire an event so we can log our action !
+ Item item = wfi.getItem();
+ Collection myCollection = wfi.getCollection();
+ String workflowStepString = null;
+ List<EPerson> currentEpersonOwners = new ArrayList<EPerson>();
+ List<Group> currentGroupOwners = new ArrayList<Group>();
+ //These are only null if our item is sent back to the submission
+ if(newStep != null && newActionConfig != null){
+ workflowStepString = workflowId + "." + newStep.getId() + "." + newActionConfig.getId();
+ //Retrieve the current owners of the task
+ List<ClaimedTask> claimedTasks = ClaimedTask.find(c, wfi.getID(), newStep.getId());
+ List<PoolTask> pooledTasks = PoolTask.find(c, wfi);
+ for (PoolTask poolTask : pooledTasks){
+ if(poolTask.getEpersonID() != -1){
+ currentEpersonOwners.add(EPerson.find(c, poolTask.getEpersonID()));
+ }else{
+ currentGroupOwners.add(Group.find(c, poolTask.getGroupID()));
+ }
+ }
+ for (ClaimedTask claimedTask : claimedTasks) {
+ currentEpersonOwners.add(EPerson.find(c, claimedTask.getOwnerID()));
+ }
+ }
+ String previousWorkflowStepString = null;
+ if(previousStepId != null && previousActionConfigId != null){
+ previousWorkflowStepString = workflowId + "." + previousStepId + "." + previousActionConfigId;
+ }
+ //Fire our usage event !
+ UsageWorkflowEvent usageWorkflowEvent = new UsageWorkflowEvent(c, item, wfi, workflowStepString, previousWorkflowStepString, myCollection, actor);
+ usageWorkflowEvent.setEpersonOwners(currentEpersonOwners.toArray(new EPerson[currentEpersonOwners.size()]));
+ usageWorkflowEvent.setGroupOwners(currentGroupOwners.toArray(new Group[currentGroupOwners.size()]));
+ new DSpace().getEventService().fireEvent(usageWorkflowEvent);
+ } catch (Exception e) {
+ //Catch all errors we do not want our workflow to crash because the logging threw an exception
+ log.error(LogManager.getHeader(c, "Error while logging workflow event", "Workflow Item: " + wfi.getID()), e);
+ }
+ }
private static WorkflowActionConfig processNextStep(Context c, EPerson user, Workflow workflow, ActionResult currentOutcome, XmlWorkflowItem wfi, Step nextStep) throws SQLException, IOException, AuthorizeException, WorkflowException, WorkflowConfigurationException {
WorkflowActionConfig nextActionConfig;
if(nextStep!=null){
@@ -253,7 +329,7 @@ public class XmlWorkflowManager {
if (nextActionConfig.requiresUI()) {
//Since a new step has been started, stop executing actions once one with a user interface is present.
c.restoreAuthSystemState();
- return null;
+ return nextActionConfig;
} else {
ActionResult newOutcome = nextActionConfig.getProcessingAction().execute(c, wfi, nextStep, null);
c.restoreAuthSystemState();
@@ -581,6 +657,18 @@ public class XmlWorkflowManager {
String rejection_message) throws SQLException, AuthorizeException,
IOException
{
+ String workflowID = null;
+ String currentStepId = null;
+ String currentActionConfigId = null;
+ ClaimedTask claimedTask = ClaimedTask.findByWorkflowIdAndEPerson(c, wi.getID(), e.getID());
+ if(claimedTask != null){
+ //Log it
+ workflowID = claimedTask.getWorkflowID();
+ currentStepId = claimedTask.getStepID();
+ currentActionConfigId = claimedTask.getActionID();
+ }
// authorize a DSpaceActions.REJECT
// stop workflow
deleteAllTasks(c, wi);
@@ -627,6 +715,7 @@ public class XmlWorkflowManager {
+ "collection_id=" + wi.getCollection().getID() + "eperson_id="
+ e.getID()));
+ logWorkflowEvent(c, workflowID, currentStepId, currentActionConfigId, wi, e, null, null);
c.restoreAuthSystemState();
return wsi;

View File

@@ -29,12 +29,16 @@
<dependency>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
- <version>3.3.0</version>
+ <version>3.5.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>jcl-over-slf4j</artifactId>
+ </exclusion>
</exclusions>
</dependency>

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.discovery;
import org.apache.cocoon.environment.Request;
import org.dspace.app.xmlui.cocoon.SearchLoggerAction;
import org.dspace.app.xmlui.utils.ContextUtil;
import org.dspace.core.Context;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class DiscoverySearchLoggerAction extends SearchLoggerAction {
@Override
protected List<String> getQueries(Request request) throws SQLException {
Context context = ContextUtil.obtainContext(request);
List<String> queries = new ArrayList<String>();
if(request.getParameter("query") != null){
queries.add(request.getParameter("query"));
}
queries.addAll(Arrays.asList(DiscoveryUIUtils.getFilterQueries(request, context)));
return queries;
}
}
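Other XMLUI aspects can hook into the same search logging by providing their own SearchLoggerAction subclass and registering it in their sitemap, just as the Discovery sitemap does below. A rough sketch for a hypothetical non-Discovery search page (the package, class name and the base-class contract are assumptions inferred only from the override above):

package org.dspace.app.xmlui.aspect.artifactbrowser; // hypothetical location

import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.cocoon.environment.Request;
import org.dspace.app.xmlui.cocoon.SearchLoggerAction;

// Sketch: log the plain "query" parameter of a simple search page.
public class SimpleSearchLoggerAction extends SearchLoggerAction {

    @Override
    protected List<String> getQueries(Request request) throws SQLException {
        List<String> queries = new ArrayList<String>();
        if (request.getParameter("query") != null) {
            queries.add(request.getParameter("query"));
        }
        return queries;
    }
}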

View File

@@ -44,6 +44,9 @@ and searching the repository.
<map:transformer name="RestrictedItem" src="org.dspace.app.xmlui.aspect.artifactbrowser.RestrictedItem"/>
</map:transformers>
+ <map:actions>
+ <map:action name="DiscoverySearchLoggerAction" src="org.dspace.app.xmlui.aspect.discovery.DiscoverySearchLoggerAction"/>
+ </map:actions>
<map:matchers default="wildcard">
<map:matcher name="HandleTypeMatcher" src="org.dspace.app.xmlui.aspect.general.HandleTypeMatcher"/>
@@ -97,6 +100,7 @@ and searching the repository.
<!-- Search -->
<map:match pattern="discover">
+ <map:act type="DiscoverySearchLoggerAction"/>
<map:transform type="SidebarFacetsTransformer"/>
<map:transform type="SimpleSearch"/>
<map:transform type="IncludePageMeta">
@@ -133,6 +137,7 @@ and searching the repository.
<!-- Simple search -->
<map:match pattern="handle/*/*/discover">
+ <map:act type="DiscoverySearchLoggerAction"/>
<map:transform type="SidebarFacetsTransformer"/>
<map:transform type="SimpleSearch"/>
<map:transform type="IncludePageMeta">

View File

@@ -0,0 +1,32 @@
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
(function ($) {
/**
* Function ensures that all the links clicked in our results pass through the internal logging mechanism
*/
$(document).ready(function() {
//Retrieve all links with handles attached (comm/coll/item links)
var urls = $('div#aspect_discovery_SimpleSearch_div_search-results').find('a');
urls.click(function(){
var $this = $(this);
//Instead of redirecting us to the page, first send us to the statistics logger
//By doing this we ensure that we register the query to the result
var form = $('form#aspect_discovery_SimpleSearch_div_main-form');
form.attr('action', form.attr('action').replace('/discover', '') + '/dso-display');
//Manipulate the fq boxes to all switch to query since the logging doesn't take into account filter queries
form.find('input[name="fq"]').attr('name', 'query');
form.find('input[name="redirectUrl"]').val($this.attr('href'));
form.submit();
return false;
});
});
})(jQuery);

View File

@@ -1,338 +1,338 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.webui.servlet;
import java.io.IOException;
import java.util.List;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.ServletException;
import org.dspace.authorize.AuthorizeException;
import org.apache.log4j.Logger;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.Group;
import org.dspace.content.DSpaceObject;
import org.dspace.handle.HandleManager;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.content.DatasetDSpaceObjectGenerator;
import org.dspace.statistics.content.DatasetTimeGenerator;
import org.dspace.statistics.content.DatasetTypeGenerator;
import org.dspace.statistics.content.StatisticsDataVisits;
import org.dspace.statistics.content.StatisticsListing;
import org.dspace.statistics.content.StatisticsTable;
import org.dspace.app.webui.components.StatisticsBean;
import org.dspace.app.webui.util.JSPManager;
/**
*
*
* @author Kim Shepherd
* @version $Revision: 4386 $
*/
public class DisplayStatisticsServlet extends DSpaceServlet
{
/** log4j logger */
private static Logger log = Logger.getLogger(DisplayStatisticsServlet.class);
protected void doDSGet(Context context, HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException,
SQLException, AuthorizeException
{
// is the statistics data publically viewable?
boolean privatereport = ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin");
// is the user a member of the Administrator (1) group?
boolean admin = Group.isMember(context, 1);
if (!privatereport || admin)
{
displayStatistics(context, request, response);
}
else
{
throw new AuthorizeException();
}
}
protected void displayStatistics(Context context, HttpServletRequest request,
HttpServletResponse response) throws ServletException, IOException,
SQLException, AuthorizeException
{
DSpaceObject dso = null;
String handle = request.getParameter("handle");
if("".equals(handle) || handle == null)
{
// We didn't get passed a handle parameter.
// That means we're looking at /handle/*/*/statistics
// with handle injected as attribute from HandleServlet
handle = (String) request.getAttribute("handle");
}
if(handle != null)
{
dso = HandleManager.resolveToObject(context, handle);
}
if(dso == null)
{
response.setStatus(HttpServletResponse.SC_NOT_FOUND);
JSPManager.showJSP(request, response, "/error/404.jsp");
return;
}
boolean isItem = false;
StatisticsBean statsVisits = new StatisticsBean();
StatisticsBean statsMonthlyVisits = new StatisticsBean();
StatisticsBean statsFileDownloads = new StatisticsBean();
StatisticsBean statsCountryVisits = new StatisticsBean();
StatisticsBean statsCityVisits = new StatisticsBean();
try
{
StatisticsListing statListing = new StatisticsListing(
new StatisticsDataVisits(dso));
statListing.setTitle("Total Visits");
statListing.setId("list1");
DatasetDSpaceObjectGenerator dsoAxis = new DatasetDSpaceObjectGenerator();
dsoAxis.addDsoChild(dso.getType(), 10, false, -1);
statListing.addDatasetGenerator(dsoAxis);
Dataset dataset = statListing.getDataset(context);
dataset = statListing.getDataset();
if (dataset == null)
{
dataset = statListing.getDataset(context);
}
if (dataset != null)
{
- String[][] matrix = dataset.getMatrixFormatted();
+ String[][] matrix = dataset.getMatrix();
List<String> colLabels = dataset.getColLabels();
List<String> rowLabels = dataset.getRowLabels();
statsVisits.setMatrix(matrix);
statsVisits.setColLabels(colLabels);
statsVisits.setRowLabels(rowLabels);
}
} catch (Exception e)
{
log.error(
"Error occured while creating statistics for dso with ID: "
+ dso.getID() + " and type " + dso.getType()
+ " and handle: " + dso.getHandle(), e);
}
try
{
StatisticsTable statisticsTable = new StatisticsTable(new StatisticsDataVisits(dso));
statisticsTable.setTitle("Total Visits Per Month");
statisticsTable.setId("tab1");
DatasetTimeGenerator timeAxis = new DatasetTimeGenerator();
timeAxis.setDateInterval("month", "-6", "+1");
statisticsTable.addDatasetGenerator(timeAxis);
DatasetDSpaceObjectGenerator dsoAxis = new DatasetDSpaceObjectGenerator();
dsoAxis.addDsoChild(dso.getType(), 10, false, -1);
statisticsTable.addDatasetGenerator(dsoAxis);
Dataset dataset = statisticsTable.getDataset(context);
dataset = statisticsTable.getDataset();
if (dataset == null)
{
dataset = statisticsTable.getDataset(context);
}
if (dataset != null)
{
- String[][] matrix = dataset.getMatrixFormatted();
+ String[][] matrix = dataset.getMatrix();
List<String> colLabels = dataset.getColLabels();
List<String> rowLabels = dataset.getRowLabels();
statsMonthlyVisits.setMatrix(matrix);
statsMonthlyVisits.setColLabels(colLabels);
statsMonthlyVisits.setRowLabels(rowLabels);
}
} catch (Exception e)
{
log.error(
"Error occured while creating statistics for dso with ID: "
+ dso.getID() + " and type " + dso.getType()
+ " and handle: " + dso.getHandle(), e);
}
if(dso instanceof org.dspace.content.Item)
{
isItem = true;
try
{
StatisticsListing statisticsTable = new StatisticsListing(new StatisticsDataVisits(dso));
statisticsTable.setTitle("File Downloads");
statisticsTable.setId("tab1");
DatasetDSpaceObjectGenerator dsoAxis = new DatasetDSpaceObjectGenerator();
dsoAxis.addDsoChild(Constants.BITSTREAM, 10, false, -1);
statisticsTable.addDatasetGenerator(dsoAxis);
Dataset dataset = statisticsTable.getDataset(context);
dataset = statisticsTable.getDataset();
if (dataset == null)
{
dataset = statisticsTable.getDataset(context);
}
if (dataset != null)
{
- String[][] matrix = dataset.getMatrixFormatted();
+ String[][] matrix = dataset.getMatrix();
List<String> colLabels = dataset.getColLabels();
List<String> rowLabels = dataset.getRowLabels();
statsFileDownloads.setMatrix(matrix);
statsFileDownloads.setColLabels(colLabels);
statsFileDownloads.setRowLabels(rowLabels);
}
}
catch (Exception e)
{
log.error(
"Error occured while creating statistics for dso with ID: "
+ dso.getID() + " and type " + dso.getType()
+ " and handle: " + dso.getHandle(), e);
}
}
try
{
StatisticsListing statisticsTable = new StatisticsListing(new StatisticsDataVisits(dso));
statisticsTable.setTitle("Top country views");
statisticsTable.setId("tab1");
DatasetTypeGenerator typeAxis = new DatasetTypeGenerator();
typeAxis.setType("countryCode");
typeAxis.setMax(10);
statisticsTable.addDatasetGenerator(typeAxis);
Dataset dataset = statisticsTable.getDataset(context);
dataset = statisticsTable.getDataset();
if (dataset == null)
{
dataset = statisticsTable.getDataset(context);
}
if (dataset != null)
{
- String[][] matrix = dataset.getMatrixFormatted();
+ String[][] matrix = dataset.getMatrix();
List<String> colLabels = dataset.getColLabels();
List<String> rowLabels = dataset.getRowLabels();
statsCountryVisits.setMatrix(matrix);
statsCountryVisits.setColLabels(colLabels);
statsCountryVisits.setRowLabels(rowLabels);
}
}
catch (Exception e)
{
log.error(
"Error occured while creating statistics for dso with ID: "
+ dso.getID() + " and type " + dso.getType()
+ " and handle: " + dso.getHandle(), e);
}
try
{
StatisticsListing statisticsTable = new StatisticsListing(new StatisticsDataVisits(dso));
statisticsTable.setTitle("Top city views");
statisticsTable.setId("tab1");
DatasetTypeGenerator typeAxis = new DatasetTypeGenerator();
typeAxis.setType("city");
typeAxis.setMax(10);
statisticsTable.addDatasetGenerator(typeAxis);
Dataset dataset = statisticsTable.getDataset(context);
dataset = statisticsTable.getDataset();
if (dataset == null)
{
dataset = statisticsTable.getDataset(context);
}
if (dataset != null)
{
- String[][] matrix = dataset.getMatrixFormatted();
+ String[][] matrix = dataset.getMatrix();
List<String> colLabels = dataset.getColLabels();
List<String> rowLabels = dataset.getRowLabels();
statsCityVisits.setMatrix(matrix);
statsCityVisits.setColLabels(colLabels);
statsCityVisits.setRowLabels(rowLabels);
}
}
catch (Exception e)
{
log.error(
"Error occured while creating statistics for dso with ID: "
+ dso.getID() + " and type " + dso.getType()
+ " and handle: " + dso.getHandle(), e);
}
request.setAttribute("statsVisits", statsVisits);
request.setAttribute("statsMonthlyVisits", statsMonthlyVisits);
request.setAttribute("statsFileDownloads", statsFileDownloads);
request.setAttribute("statsCountryVisits",statsCountryVisits);
request.setAttribute("statsCityVisits", statsCityVisits);
request.setAttribute("isItem", isItem);
JSPManager.showJSP(request, response, "display-statistics.jsp");
}
}

View File

@@ -0,0 +1,74 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.webui.servlet;
import org.apache.commons.lang.StringUtils;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.usage.UsageEvent;
import org.dspace.usage.UsageSearchEvent;
import org.dspace.utils.DSpace;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Arrays;
/**
* Every time a user clicks on a search result he will be redirected through this servlet
* this servlet will retrieve all query information & store this for the search statistics
* Once everything has been stored the user will be
* redirected to the dso he clicked on (indicated by the redirectUrl parameter)
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class SearchResultLogServlet extends DSpaceServlet{
@Override
protected void doDSPost(Context context, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException, SQLException, AuthorizeException {
String redirectUrl = request.getParameter("redirectUrl");
String scopeHandle = request.getParameter("scope");
DSpaceObject scope = HandleManager.resolveToObject(context, scopeHandle);
String resultHandle = StringUtils.substringAfter(redirectUrl, "/handle/");
DSpaceObject result = HandleManager.resolveToObject(context, resultHandle);
//Fire an event to log our search result
UsageSearchEvent searchEvent = new UsageSearchEvent(
UsageEvent.Action.SEARCH,
request,
context,
result,
Arrays.asList(request.getParameterValues("query")), scope);
if(!StringUtils.isBlank(request.getParameter("rpp"))){
searchEvent.setRpp(Integer.parseInt(request.getParameter("rpp")));
}
if(!StringUtils.isBlank(request.getParameter("sort_by"))){
searchEvent.setSortBy(request.getParameter("sort_by"));
}
if(!StringUtils.isBlank(request.getParameter("order"))){
searchEvent.setSortOrder(request.getParameter("order"));
}
if(!StringUtils.isBlank(request.getParameter("page"))){
searchEvent.setPage(Integer.parseInt(request.getParameter("page")));
}
new DSpace().getEventService().fireEvent(
searchEvent);
response.sendRedirect(redirectUrl);
}
}

View File

@@ -428,6 +428,11 @@
<servlet-class>org.dspace.app.webui.servlet.AuthorityChooseServlet</servlet-class>
</servlet>
+ <servlet>
+ <servlet-name>SearchResultLogServlet</servlet-name>
+ <servlet-class>org.dspace.app.webui.servlet.SearchResultLogServlet</servlet-class>
+ </servlet>
<!-- shibbolized dspace -->
<servlet>
<servlet-name>shibboleth-login</servlet-name>
@@ -722,6 +727,11 @@
<url-pattern>/json/*</url-pattern>
</servlet-mapping>
+ <servlet-mapping>
+ <servlet-name>SearchResultLogServlet</servlet-name>
+ <url-pattern>/dso-display</url-pattern>
+ </servlet-mapping>
<!-- Icon MIME type -->
<mime-mapping>
<extension>ico</extension>

View File

@@ -92,6 +92,10 @@
<dspace:layout titlekey="jsp.search.results.title">
+ <script type="text/javascript" src="<%= request.getContextPath() %>/static/js/jquery/jquery-1.6.2.min.js"> </script>
+ <script type="text/javascript" src="<%= request.getContextPath() %>/static/js/search-results.js"> </script>
<%-- <h1>Search Results</h1> --%>
<h1><fmt:message key="jsp.search.results.title"/></h1>
@@ -293,6 +297,10 @@ else
</form>
</div>
+ <%
+ if(0 < communities.length || 0 < collections.length || 0 < items.length){
+ %>
+ <div id="search-results-division">
<% if (communities.length > 0 ) { %>
<%-- <h3>Community Hits:</h3> --%>
<h3><fmt:message key="jsp.search.results.comhits"/></h3>
@@ -312,6 +320,10 @@ else
<h3><fmt:message key="jsp.search.results.itemhits"/></h3>
<dspace:itemlist items="<%= items %>" sortOption="<%= so %>" authorLimit="<%= qResults.getEtAl() %>" />
<% } %>
+ </div>
+ <%
+ }
+ %>
<p align="center">
@@ -383,5 +395,15 @@ if (pageTotal > pageCurrent)
</p>
+ <form id="dso-display" action="<%=request.getContextPath()%>/dso-display" method="post">
+ <input type="hidden" name="query" value="<%=query%>"/>
+ <input type="hidden" name="rpp" value="<%=rpp%>"/>
+ <input type="hidden" name="page" value="<%=pageCurrent%>"/>
+ <input type="hidden" name="sort_by" value="<%=(so != null ? so.getNumber() : 0)%>"/>
+ <input type="hidden" name="order" value="<%=order%>"/>
+ <input type="hidden" name="scope" value="<%=collection != null ? collection.getHandle() : (community != null ? community.getHandle() : "")%>"/>
+ <input type="hidden" name="redirectUrl" value=""/>
+ </form>
</dspace:layout>

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,32 @@
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
$.noConflict();
(function ($) {
/**
* Function ensures that all the links clicked in our results pass through the internal logging mechanism
*/
$(document).ready(function() {
//Retrieve all links with handles attached (comm/coll/item links)
var urls = $('div#search-results-division').find('a');
urls.click(function(){
var $this = $(this);
//Instead of redirecting us to the page, first send us to the statistics logger
//By doing this we ensure that we register the query to the result
var form = $('form#dso-display');
form.find('input[name="redirectUrl"]').val($this.attr('href'));
form.submit();
return false;
});
});
})(jQuery);

View File

@@ -145,14 +145,19 @@
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
+ <artifactId>jcl-over-slf4j</artifactId>
</exclusion>
<exclusion>
<groupId>org.slf4j</groupId>
- <artifactId>jcl-over-slf4j</artifactId>
+ <artifactId>slf4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>commons-configuration</groupId>
+ <artifactId>commons-configuration</artifactId>
+ <version>1.8</version>
+ </dependency>
<dependency>
<groupId>org.dspace.dependencies</groupId>
<artifactId>dspace-geoip</artifactId>

View File

@@ -18,13 +18,14 @@ import java.util.Map;
import com.Ostermiller.util.ExcelCSVPrinter;
import org.apache.commons.lang.ArrayUtils;
+ import org.apache.commons.lang.StringUtils;
/**
*
* @author kevinvandevelde at atmire.com
* Date: 21-jan-2009
* Time: 13:44:48
*
*/
public class Dataset {
@@ -41,14 +42,14 @@ public class Dataset {
/* The attributes for the rows */
private List<Map<String, String>> rowLabelsAttrs;
/* The data in a matrix */
- private float[][]matrix;
+ private String[][]matrix;
/* The format in which we format our floats */
private String format = "0";
public Dataset(int rows, int cols){
- matrix = new float[rows][cols];
+ matrix = new String[rows][cols];
nbRows = rows;
nbCols = cols;
initColumnLabels(cols);
@@ -56,7 +57,7 @@ public class Dataset {
}
public Dataset(float[][] matrix){
- this.matrix = (float[][]) ArrayUtils.clone(matrix);
+ this.matrix = (String[][]) ArrayUtils.clone(matrix);
nbRows = matrix.length;
if(0 < matrix.length && 0 < matrix[0].length)
{
@@ -146,10 +147,6 @@ public class Dataset {
return rowLabels;
}
- public float[][] getMatrix() {
- return (float[][]) ArrayUtils.clone(matrix);
- }
public int getNbRows() {
return nbRows;
}
@@ -166,40 +163,32 @@ public class Dataset {
this.format = format;
}
- public String[][] getMatrixFormatted(){
- DecimalFormat decimalFormat = new DecimalFormat(format);
+ public String[][] getMatrix(){
if (matrix.length == 0) {
return new String[0][0];
} else {
- String[][] strMatrix = new String[matrix.length][matrix[0].length];
- for (int i = 0; i < matrix.length; i++) {
- for (int j = 0; j < matrix[i].length; j++) {
- strMatrix[i][j] = decimalFormat.format(matrix[i][j]);
- }
- }
- return strMatrix;
+ return matrix;
}
}
public void addValueToMatrix(int row, int coll, float value) {
- matrix[row][coll] = value;
+ DecimalFormat decimalFormat = new DecimalFormat(format);
+ matrix[row][coll] = decimalFormat.format(value);
}
public void addValueToMatrix(int row, int coll, String value) throws ParseException {
- DecimalFormat decimalFormat = new DecimalFormat(format);
- Number number = decimalFormat.parse(value);
- matrix[row][coll] = number.floatValue();
+ matrix[row][coll] = value;
}
/**
* Returns false if this dataset only contains zero's.
*/
public boolean containsNonZeroValues(){
if (matrix != null) {
- for (float[] vector : matrix) {
- for (float v : vector) {
- if (v != 0)
+ for (String[] vector : matrix) {
+ for (String v : vector) {
+ if (StringUtils.isBlank(v) || v.equals("0"))
{
return true;
}
@@ -215,7 +204,7 @@ public class Dataset {
//Lets make sure we at least have something to flip
if(0 < matrix.length && 0 < matrix[0].length){
//Flip the data first
- float[][] newMatrix = new float[matrix[0].length][matrix.length];
+ String[][] newMatrix = new String[matrix[0].length][matrix.length];
for (int i = 0; i < matrix.length; i++) {
for (int j = 0; j < matrix[i].length; j++) {
newMatrix[j][i] = matrix[i][j];
@@ -258,7 +247,7 @@ public class Dataset {
ecsvp.writeln();
List<String> rowLabels = getRowLabels();
- String[][] matrix = getMatrixFormatted();
+ String[][] matrix = getMatrix();
for (int i = 0; i < rowLabels.size(); i++) {
String rowLabel = rowLabels.get(i);
ecsvp.write(rowLabel);

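The net effect of the Dataset rework above is that values are formatted once, when they are added, and getMatrix() hands back display-ready strings. A minimal usage sketch (variable names are illustrative only; the default format pattern "0" is assumed):

    // Sketch: the reworked Dataset stores formatted strings instead of floats.
    Dataset dataset = new Dataset(1, 2);
    dataset.setColLabel(0, "searches");
    dataset.setColLabel(1, "percent-total");
    dataset.setRowLabel(0, "1");
    dataset.addValueToMatrix(0, 0, 42f);          // formatted to "42" at insertion time
    dataset.addValueToMatrix(0, 1, "12.50%");     // pre-formatted strings are stored as-is
    String[][] matrix = dataset.getMatrix();      // String[][]; no second formatting pass needed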

@@ -12,6 +12,9 @@ import org.dspace.eperson.EPerson;
 import org.dspace.services.model.Event;
 import org.dspace.usage.AbstractUsageEventListener;
 import org.dspace.usage.UsageEvent;
+import org.dspace.usage.UsageSearchEvent;
+import org.dspace.usage.UsageWorkflowEvent;
+import org.springframework.util.CollectionUtils;
 /**
  * Simple SolrLoggerUsageEvent facade to separate Solr specific
@@ -29,12 +32,27 @@ public class SolrLoggerUsageEventListener extends AbstractUsageEventListener {
         if(event instanceof UsageEvent)
         {
             try{
                 UsageEvent ue = (UsageEvent)event;
                 EPerson currentUser = ue.getContext() == null ? null : ue.getContext().getCurrentUser();
-                SolrLogger.post(ue.getObject(), ue.getRequest(), currentUser);
+                if(UsageEvent.Action.VIEW == ue.getAction()){
+                    SolrLogger.postView(ue.getObject(), ue.getRequest(), currentUser);
+                }else
+                if(UsageEvent.Action.SEARCH == ue.getAction()){
+                    UsageSearchEvent usageSearchEvent = (UsageSearchEvent) ue;
+                    //Only log if the user has already filled in a query !
+                    if(!CollectionUtils.isEmpty(((UsageSearchEvent) ue).getQueries())){
+                        SolrLogger.postSearch(ue.getObject(), ue.getRequest(), currentUser,
+                                usageSearchEvent.getQueries(), usageSearchEvent.getRpp(), usageSearchEvent.getSortBy(),
+                                usageSearchEvent.getSortOrder(), usageSearchEvent.getPage(), usageSearchEvent.getScope());
+                    }
+                }else
+                if(UsageEvent.Action.WORKFLOW == ue.getAction()){
+                    UsageWorkflowEvent usageWorkflowEvent = (UsageWorkflowEvent) ue;
+                    SolrLogger.postWorkflow(usageWorkflowEvent);
+                }
             }
             catch(Exception e)

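With the routing above, a producer only has to fire the right kind of event; the listener decides whether it becomes a postView, postSearch or postWorkflow call. A hedged sketch of firing a search event, reusing the UsageSearchEvent constructor that SearchResultLogAction uses further down in this commit (request, context, result and scope are assumed to be in scope):

    // Sketch: fire a SEARCH usage event so SolrLoggerUsageEventListener picks it up.
    UsageSearchEvent searchEvent = new UsageSearchEvent(
            UsageEvent.Action.SEARCH, request, context,
            result,                                   // the DSpaceObject the search led to
            java.util.Arrays.asList("dark matter"),   // the queries; empty lists are skipped by the listener
            scope);
    searchEvent.setRpp(10);
    searchEvent.setPage(1);
    new DSpace().getEventService().fireEvent(searchEvent);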

@@ -0,0 +1,56 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class DatasetSearchGenerator extends DatasetTypeGenerator {
public static enum Mode {
SEARCH_OVERVIEW ("search_overview"),
SEARCH_OVERVIEW_TOTAL ("search_overview_total");
private final String text;
Mode(String text) {
this.text = text;
}
public String text() { return text; }
}
private Mode mode;
private boolean percentage = false;
private boolean retrievePageViews;
public boolean isRetrievePageViews() {
return retrievePageViews;
}
public void setRetrievePageViews(boolean retrievePageViews) {
this.retrievePageViews = retrievePageViews;
}
public void setPercentage(boolean percentage){
this.percentage = percentage;
}
public boolean isPercentage() {
return percentage;
}
public Mode getMode() {
return mode;
}
public void setMode(Mode mode) {
this.mode = mode;
}
}

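A DatasetSearchGenerator is only a configuration holder; something along these lines feeds it to the StatisticsDataSearches implementation added below. The setType/setMax setters come from DatasetTypeGenerator and the addDatasetGenerator/createDataset calls are the stock statistics-content API, so treat the exact wiring as an assumption:

    // Sketch: top 10 search terms, with a percentage column and page views per search.
    DatasetSearchGenerator generator = new DatasetSearchGenerator();
    generator.setMode(DatasetSearchGenerator.Mode.SEARCH_OVERVIEW);
    generator.setType("query");                 // the Solr field holding the search terms
    generator.setMax(10);
    generator.setPercentage(true);
    generator.setRetrievePageViews(true);

    StatisticsDataSearches statisticsData = new StatisticsDataSearches(dso);
    statisticsData.addDatasetGenerator(generator);
    Dataset dataset = statisticsData.createDataset(context);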

@@ -0,0 +1,238 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.ObjectCount;
import org.dspace.statistics.SolrLogger;
import org.dspace.statistics.content.filter.StatisticsFilter;
import org.dspace.utils.DSpace;
import java.io.IOException;
import java.sql.SQLException;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
/**
* A statistics data implementation that will query the statistics backend for search information
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class StatisticsDataSearches extends StatisticsData {
private static final DecimalFormat pageViewFormat = new DecimalFormat("0.00");
private static final DecimalFormat percentageFormat = new DecimalFormat("0.00%");
/** Current DSpaceObject for which to generate the statistics. */
private DSpaceObject currentDso;
public StatisticsDataSearches(DSpaceObject dso) {
super();
this.currentDso = dso;
}
@Override
public Dataset createDataset(Context context) throws SQLException, SolrServerException, IOException, ParseException {
// Check if we already have one.
// If we do then give it back.
if(getDataset() != null)
{
return getDataset();
}
List<StatisticsFilter> filters = getFilters();
List<String> defaultFilters = new ArrayList<String>();
for (StatisticsFilter statisticsFilter : filters) {
defaultFilters.add(statisticsFilter.toQuery());
}
String defaultFilterQuery = StringUtils.join(defaultFilters.iterator(), " AND ");
String query = getQuery();
Dataset dataset = new Dataset(0,0);
List<DatasetGenerator> datasetGenerators = getDatasetGenerators();
if(0 < datasetGenerators.size()){
//At the moment we can only have one dataset generator
DatasetGenerator datasetGenerator = datasetGenerators.get(0);
if(datasetGenerator instanceof DatasetSearchGenerator){
DatasetSearchGenerator typeGenerator = (DatasetSearchGenerator) datasetGenerator;
if(typeGenerator.getMode() == DatasetSearchGenerator.Mode.SEARCH_OVERVIEW){
StringBuilder fqBuffer = new StringBuilder(defaultFilterQuery);
if(0 < fqBuffer.length())
{
fqBuffer.append(" AND ");
}
fqBuffer.append(getSearchFilterQuery());
ObjectCount[] topCounts = SolrLogger.queryFacetField(query, fqBuffer.toString(), typeGenerator.getType(), typeGenerator.getMax(), (typeGenerator.isPercentage() || typeGenerator.isIncludeTotal()), null);
long totalCount = -1;
if(typeGenerator.isPercentage() && 0 < topCounts.length){
//Retrieve the total required to calculate the percentage
totalCount = topCounts[topCounts.length - 1].getCount();
//Remove the total count from view !
topCounts = (ObjectCount[]) ArrayUtils.subarray(topCounts, 0, topCounts.length - 1);
}
int nrColumns = 2;
if(typeGenerator.isPercentage()){
nrColumns++;
}
if(typeGenerator.isRetrievePageViews()){
nrColumns++;
}
dataset = new Dataset(topCounts.length, nrColumns);
dataset.setColLabel(0, "search-terms");
dataset.setColLabel(1, "searches");
if(typeGenerator.isPercentage()){
dataset.setColLabel(2, "percent-total");
}
if(typeGenerator.isRetrievePageViews()){
dataset.setColLabel(3, "views-search");
}
for (int i = 0; i < topCounts.length; i++) {
ObjectCount queryCount = topCounts[i];
dataset.setRowLabel(i, String.valueOf(i + 1));
String displayedValue = queryCount.getValue();
if(new DSpace().getConfigurationService().getPropertyAsType("usage-statistics.search.statistics.unescape.queries", Boolean.TRUE)){
displayedValue = displayedValue.replace("\\", "");
}
dataset.addValueToMatrix(i, 0, displayedValue);
dataset.addValueToMatrix(i, 1, queryCount.getCount());
if(typeGenerator.isPercentage()){
//Calculate our percentage from the total !
dataset.addValueToMatrix(i, 2, percentageFormat.format(((float) queryCount.getCount() / totalCount)));
}
if(typeGenerator.isRetrievePageViews()){
String queryString = ClientUtils.escapeQueryChars(queryCount.getValue());
if(queryString.equals("")){
queryString = "\"\"";
}
ObjectCount totalPageViews = getTotalPageViews("query:" + queryString, defaultFilterQuery);
dataset.addValueToMatrix(i, 3, pageViewFormat.format((float) totalPageViews.getCount() / queryCount.getCount()));
}
}
}else
if(typeGenerator.getMode() == DatasetSearchGenerator.Mode.SEARCH_OVERVIEW_TOTAL){
//Retrieve the total counts !
ObjectCount totalCount = SolrLogger.queryTotal(query, getSearchFilterQuery());
//Retrieve the filtered count by using the default filter query
StringBuilder fqBuffer = new StringBuilder(defaultFilterQuery);
if(0 < fqBuffer.length())
{
fqBuffer.append(" AND ");
}
fqBuffer.append(getSearchFilterQuery());
ObjectCount totalFiltered = SolrLogger.queryTotal(query, fqBuffer.toString());
fqBuffer = new StringBuilder(defaultFilterQuery);
if(0 < fqBuffer.length())
{
fqBuffer.append(" AND ");
}
fqBuffer.append("statistics_type:").append(SolrLogger.StatisticsType.SEARCH_RESULT.text());
ObjectCount totalPageViews = getTotalPageViews(query, defaultFilterQuery);
dataset = new Dataset(1, 3);
dataset.setRowLabel(0, "");
dataset.setColLabel(0, "searches");
dataset.addValueToMatrix(0, 0, totalFiltered.getCount());
dataset.setColLabel(1, "percent-total");
//Ensure that we do NOT divide by 0
float percentTotal;
if(totalCount.getCount() == 0){
percentTotal = 0;
}else{
percentTotal = (float) totalFiltered.getCount() / totalCount.getCount();
}
dataset.addValueToMatrix(0, 1, percentageFormat.format(percentTotal));
dataset.setColLabel(2, "views-search");
//Ensure that we do NOT divide by 0
float pageViews;
if(totalFiltered.getCount() == 0){
pageViews = 0;
}else{
pageViews = (float) totalPageViews.getCount() / totalFiltered.getCount();
}
dataset.addValueToMatrix(0, 2, pageViewFormat.format(pageViews));
}
}else{
throw new IllegalArgumentException("Data generator with class" + datasetGenerator.getClass().getName() + " is not supported by the statistics search engine !");
}
}
return dataset;
}
/**
* Returns the query to be used in Solr.
* When a DSO is given, a query scoped to that object (scopeType/scopeId) is returned; otherwise the default *:* query is used.
* @return the query as a string
*/
protected String getQuery() {
String query;
if(currentDso != null){
query = "scopeType: " + currentDso.getType() + " AND scopeId: " + currentDso.getID();
}else{
query = "*:*";
}
return query;
}
private ObjectCount getTotalPageViews(String query, String defaultFilterQuery) throws SolrServerException {
StringBuilder fqBuffer;
fqBuffer = new StringBuilder(defaultFilterQuery);
if(0 < fqBuffer.length())
{
fqBuffer.append(" AND ");
}
fqBuffer.append("statistics_type:").append(SolrLogger.StatisticsType.SEARCH_RESULT.text());
//Retrieve the number of page views by this query !
return SolrLogger.queryTotal(query, fqBuffer.toString());
}
/**
* Returns a filter query that only lets new searches pass;
* a new search is one that has not yet been paged through.
* @return a Solr filter query
*/
private String getSearchFilterQuery() {
StringBuilder fqBuffer = new StringBuilder();
fqBuffer.append("statistics_type:").append(SolrLogger.StatisticsType.SEARCH.text());
//Also append a filter query to ensure that paging is left out !
fqBuffer.append(" AND -page:[* TO *]");
return fqBuffer.toString();
}
}

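For reference, the requests this class ends up sending to Solr look roughly as follows; the field names come from the code above, the statistics_type values assume the usual lowercase text() values of SolrLogger.StatisticsType, and escaping is left to SolrLogger:

    q  = scopeType: 4 AND scopeId: 12                  (community scope; *:* when no DSO is given)
    fq = statistics_type:search AND -page:[* TO *]     (new searches only, paging excluded)
    fq = statistics_type:search_result                 (used when counting page views per query)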

@@ -7,6 +7,7 @@
  */
 package org.dspace.statistics.content;
+import org.apache.commons.lang.StringUtils;
 import org.dspace.content.*;
 import org.dspace.statistics.Dataset;
 import org.dspace.statistics.ObjectCount;
@@ -148,12 +149,8 @@ public class StatisticsDataVisits extends StatisticsData
         }
         // Determine our filterQuery
-        String filterQuery = null;
+        String filterQuery = "";
         for (int i = 0; i < getFilters().size(); i++) {
-            if(filterQuery == null)
-            {
-                filterQuery = "";
-            }
             StatisticsFilter filter = getFilters().get(i);
             filterQuery += "(" + filter.toQuery() + ")";
@@ -162,6 +159,14 @@ public class StatisticsDataVisits extends StatisticsData
                 filterQuery += " AND ";
             }
         }
+        if(StringUtils.isNotBlank(filterQuery)){
+            filterQuery += " AND ";
+        }
+        //Only use the view type and make sure old data (where no view type is present) is also supported
+        //Solr doesn't explicitly apply boolean logic, so this query cannot be simplified to an OR query
+        filterQuery += "-(statistics_type:[* TO *] AND -statistics_type:" + SolrLogger.StatisticsType.VIEW.text() + ")";
         // System.out.println("FILTERQUERY: " + filterQuery);
         // We determine our values on the queries resolved above

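The clause appended above keeps legacy records that predate the statistics_type field while still excluding search and workflow records, which is why it is a double negation rather than a plain OR. With the configured filters in front, the final filter query comes out roughly as (view assumes the usual text value of SolrLogger.StatisticsType.VIEW):

    (<queries produced by the configured StatisticsFilters>) AND -(statistics_type:[* TO *] AND -statistics_type:view)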

@@ -0,0 +1,202 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.statistics.content;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.dspace.content.DCDate;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.services.ConfigurationService;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.ObjectCount;
import org.dspace.statistics.SolrLogger;
import org.dspace.statistics.content.filter.StatisticsFilter;
import org.dspace.utils.DSpace;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.*;
/**
* A workflow data implementation that will query the statistics backend for workflow information
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class StatisticsDataWorkflow extends StatisticsData {
private static final Logger log = Logger.getLogger(StatisticsDataWorkflow.class);
/** Current DSpaceObject for which to generate the statistics. */
private DSpaceObject currentDso;
/** Indicates over how many months the average should be calculated (-1 disables averaging) **/
private int averageMonths = -1;
public StatisticsDataWorkflow(DSpaceObject dso, int averageMonths) {
super();
this.currentDso = dso;
this.averageMonths = averageMonths;
}
@Override
public Dataset createDataset(Context context) throws SQLException, SolrServerException, IOException, ParseException {
// Check if we already have one.
// If we do then give it back.
if(getDataset() != null)
{
return getDataset();
}
List<StatisticsFilter> filters = getFilters();
List<String> defaultFilters = new ArrayList<String>();
for (StatisticsFilter statisticsFilter : filters) {
defaultFilters.add(statisticsFilter.toQuery());
}
String defaultFilterQuery = StringUtils.join(defaultFilters.iterator(), " AND ");
String query = getQuery();
Dataset dataset = new Dataset(0,0);
List<DatasetGenerator> datasetGenerators = getDatasetGenerators();
if(0 < datasetGenerators.size()){
//At the moment we can only have one dataset generator
DatasetGenerator datasetGenerator = datasetGenerators.get(0);
if(datasetGenerator instanceof DatasetTypeGenerator){
DatasetTypeGenerator typeGenerator = (DatasetTypeGenerator) datasetGenerator;
ObjectCount[] topCounts = SolrLogger.queryFacetField(query, defaultFilterQuery, typeGenerator.getType(), typeGenerator.getMax(), typeGenerator.isIncludeTotal(), null);
//Retrieve our total field counts
Map<String, Long> totalFieldCounts = new HashMap<String, Long>();
if(averageMonths != -1){
totalFieldCounts = getTotalFacetCounts(typeGenerator);
}
long monthDifference = 1;
if(getOldestWorkflowItemDate() != null){
monthDifference = getMonthsDifference(new Date(), getOldestWorkflowItemDate());
}
dataset = new Dataset(topCounts.length, (averageMonths != -1 ? 3 : 2));
dataset.setColLabel(0, "step");
dataset.setColLabel(1, "performed");
if(averageMonths != -1){
dataset.setColLabel(2, "average");
}
for (int i = 0; i < topCounts.length; i++) {
ObjectCount topCount = topCounts[i];
dataset.setRowLabel(i, String.valueOf(i + 1));
dataset.addValueToMatrix(i, 0, topCount.getValue());
dataset.addValueToMatrix(i, 1, topCount.getCount());
if(averageMonths != -1){
//Calculate the average of one month
long monthlyAverage = 0;
if(totalFieldCounts.get(topCount.getValue()) != null){
monthlyAverage = totalFieldCounts.get(topCount.getValue()) / monthDifference;
}
//We multiply our monthly average by the requested number of months
dataset.addValueToMatrix(i, 2, (monthlyAverage * averageMonths));
}
}
}
}
return dataset;
}
/**
* Returns the query to be used in Solr.
* The query is always restricted to workflow events and, when a DSO is given, additionally scoped to the owning community or collection.
* @return the query as a string
*/
protected String getQuery() {
String query = "statistics_type:" + SolrLogger.StatisticsType.WORKFLOW.text();
query += " AND NOT(previousWorkflowStep: SUBMIT)";
if(currentDso != null){
if(currentDso.getType() == Constants.COMMUNITY){
query += " AND owningComm:";
}else
if(currentDso.getType() == Constants.COLLECTION){
query += " AND owningColl:";
}
query += currentDso.getID();
}
return query;
}
private int getMonthsDifference(Date date1, Date date2) {
int m1 = date1.getYear() * 12 + date1.getMonth();
int m2 = date2.getYear() * 12 + date2.getMonth();
return m2 - m1 + 1;
}
/**
* Retrieve the total counts for the facets (the total count uses the same query but none of the filter queries).
* @param typeGenerator the type generator
* @return a map with the facet value as key and its total count as value
* @throws org.apache.solr.client.solrj.SolrServerException
*/
protected Map<String, Long> getTotalFacetCounts(DatasetTypeGenerator typeGenerator) throws SolrServerException {
ObjectCount[] objectCounts = SolrLogger.queryFacetField(getQuery(), null, typeGenerator.getType(), -1, false, null);
Map<String, Long> result = new HashMap<String, Long>();
for (ObjectCount objectCount : objectCounts) {
result.put(objectCount.getValue(), objectCount.getCount());
}
return result;
}
protected Date getOldestWorkflowItemDate() throws SolrServerException {
ConfigurationService configurationService = new DSpace().getConfigurationService();
String workflowStartDate = configurationService.getProperty("usage-statistics.workflow-start-date");
if(workflowStartDate == null){
//Query our solr for it !
QueryResponse oldestRecord = SolrLogger.query(getQuery(), null, null, 1, 0, null, null, null, null, "time", true);
if(0 < oldestRecord.getResults().getNumFound()){
SolrDocument solrDocument = oldestRecord.getResults().get(0);
Date oldestDate = (Date) solrDocument.getFieldValue("time");
//Store the date, we only need to retrieve this once !
try {
//Also store it in the solr-statics configuration file, the reason for this being that the sort query
//can be very time consuming & we do not want this delay each time we want to see workflow statistics
String solrConfigDir = configurationService.getProperty("dspace.dir") + File.separator + "config"
+ File.separator + "modules" + File.separator + "usage-statistics.cfg";
PropertiesConfiguration config = new PropertiesConfiguration(solrConfigDir);
config.setProperty("workflow-start-date", new DCDate(oldestDate));
config.save();
} catch (ConfigurationException e) {
log.error("Error while storing workflow start date", e);
}
//Also store it in our local config !
configurationService.setProperty("usage-statistics.workflow-start-date", new DCDate(oldestDate).toString());
//Write to file
return oldestDate;
}else{
return null;
}
}else{
return new DCDate(workflowStartDate).toDate();
}
}
}

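Filled in, the query that getQuery() above produces for, say, a collection with internal id 7 reads roughly (again assuming the usual lowercase text value of the WORKFLOW constant):

    statistics_type:workflow AND NOT(previousWorkflowStep: SUBMIT) AND owningColl:7

The per-step breakdown is then a facet over whatever field is configured on the DatasetTypeGenerator.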

@@ -1,155 +1,160 @@
 /**
  * The contents of this file are subject to the license and copyright
  * detailed in the LICENSE and NOTICE files at the root of the source
  * tree and available online at
  *
  * http://www.dspace.org/license/
  */
 package org.dspace.statistics.util;
 import org.apache.commons.cli.*;
 import org.apache.log4j.Logger;
 import org.apache.tools.ant.taskdefs.Get;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.statistics.SolrLogger;
 import java.io.*;
 import java.net.URL;
 /**
  * Class to load intermediate statistics files into solr
  *
  * @author Stuart Lewis
  */
 public class StatisticsClient
 {
     private static final Logger log = Logger.getLogger(StatisticsClient.class);
     /**
      * Print the help message
      *
      * @param options The command line options the user gave
      * @param exitCode the system exit code to use
      */
     private static void printHelp(Options options, int exitCode)
     {
         // print the help message
         HelpFormatter myhelp = new HelpFormatter();
         myhelp.printHelp("StatisticsClient\n", options);
         System.exit(exitCode);
     }
     /**
      * Main method to run the statistics importer.
      *
      * @param args The command line arguments
      * @throws Exception If something goes wrong
      */
     public static void main(String[] args) throws Exception
     {
         CommandLineParser parser = new PosixParser();
         Options options = new Options();
         options.addOption("u", "update-spider-files", false,
                 "Update Spider IP Files from internet into " +
                         ConfigurationManager.getProperty("dspace.dir") + "/config/spiders");
         options.addOption("m", "mark-spiders", false, "Update isBot Flag in Solr");
         options.addOption("f", "delete-spiders-by-flag", false, "Delete Spiders in Solr By isBot Flag");
         options.addOption("i", "delete-spiders-by-ip", false, "Delete Spiders in Solr By IP Address");
         options.addOption("o", "optimize", false, "Run maintenance on the SOLR index");
         options.addOption("b", "reindex-bitstreams", false, "Reindex the bitstreams to ensure we have the bundle name");
         options.addOption("r", "remove-deleted-bitstreams", false, "While indexing the bundle names remove the statistics about deleted bitstreams");
+        options.addOption("s", "shard-solr-index", false, "Split the data from the main Solr core into separate Solr cores per year");
         options.addOption("h", "help", false, "help");
         CommandLine line = parser.parse(options, args);
         // Did the user ask to see the help?
         if (line.hasOption('h'))
         {
             printHelp(options, 0);
         }
         if(line.hasOption("u"))
         {
             StatisticsClient.updateSpiderFiles();
         }
         else if (line.hasOption('m'))
         {
             SolrLogger.markRobotsByIP();
         }
         else if(line.hasOption('f'))
         {
             SolrLogger.deleteRobotsByIsBotFlag();
         }
         else if(line.hasOption('i'))
         {
             SolrLogger.deleteRobotsByIP();
         }
         else if(line.hasOption('o'))
         {
             SolrLogger.optimizeSOLR();
         }
         else if(line.hasOption('b'))
         {
             SolrLogger.reindexBitstreamHits(line.hasOption('r'));
         }
+        else if(line.hasOption('s'))
+        {
+            SolrLogger.shardSolrIndex();
+        }
         else
         {
             printHelp(options, 0);
         }
     }
     /**
      * Method to update Spiders in config directory.
      */
     private static void updateSpiderFiles()
     {
         try
         {
             System.out.println("Downloading latest spider IP addresses:");
             // Get the list URLs to download from
             String urls = ConfigurationManager.getProperty("solr-statistics", "spiderips.urls");
             if ((urls == null) || ("".equals(urls)))
             {
                 System.err.println(" - Missing setting from dspace.cfg: solr.spiderips.urls");
                 System.exit(0);
             }
             // Get the location of spiders directory
             File spiders = new File(ConfigurationManager.getProperty("dspace.dir"),"config/spiders");
             if (!spiders.exists() && !spiders.mkdirs())
             {
                 log.error("Unable to create spiders directory");
             }
             String[] values = urls.split(",");
             for (String value : values)
             {
                 value = value.trim();
                 System.out.println(" Downloading: " + value);
                 URL url = new URL(value);
                 Get get = new Get();
                 get.setDest(new File(spiders, url.getHost() + url.getPath().replace("/","-")));
                 get.setSrc(url);
                 get.setUseTimestamp(true);
                 get.execute();
             }
         } catch (Exception e)
         {
             System.err.println(" - Error: " + e.getMessage());
             e.printStackTrace();
             System.exit(1);
         }
     }
 }

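Assuming the usual launcher mapping for this class (it is exposed as stats-util in a stock DSpace install), the new sharding task would be invoked roughly as:

    [dspace]/bin/dspace stats-util -s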

@@ -10,14 +10,12 @@ package org.dspace.app.xmlui.aspect.artifactbrowser;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.Map;
 import org.apache.cocoon.caching.CacheableProcessingComponent;
 import org.apache.cocoon.environment.ObjectModelHelper;
 import org.apache.cocoon.environment.Request;
-import org.apache.oro.text.perl.Perl5Util;
 import org.dspace.app.xmlui.utils.HandleUtil;
 import org.dspace.app.xmlui.utils.UIException;
 import org.dspace.app.xmlui.wing.Message;
@@ -98,10 +96,9 @@ public class AdvancedSearch extends AbstractSearch implements CacheableProcessin
     /** How many conjunction fields to display */
     private static final int FIELD_DISPLAY_COUNT = 3;
-    private static final int FIELD_MAX_COUNT = 12;
     /** A cache of extracted search fields */
-    private java.util.List<SearchField> fields;
+    private java.util.List<AdvancedSearchUtils.SearchField> fields;
     /**
      * Add Page metadata.
@@ -167,7 +164,7 @@ public class AdvancedSearch extends AbstractSearch implements CacheableProcessin
             buildQueryField(i, row.addCell());
         }
-        for (SearchField field : fields)
+        for (AdvancedSearchUtils.SearchField field : fields)
         {
             // Skip over all the fields we've displayed.
             int i = field.getIndex();
@@ -315,7 +312,7 @@ public class AdvancedSearch extends AbstractSearch implements CacheableProcessin
             parameters.put("scope", scope);
         }
-        for (SearchField searchField : getSearchFields(request))
+        for (AdvancedSearchUtils.SearchField searchField : getSearchFields(request))
         {
             int index = searchField.getIndex();
             String field = searchField.getField();
@@ -363,188 +360,13 @@ public class AdvancedSearch extends AbstractSearch implements CacheableProcessin
     protected String getQuery() throws UIException
     {
         Request request = ObjectModelHelper.getRequest(objectModel);
-        return buildQuery(getSearchFields(request));
+        return AdvancedSearchUtils.buildQuery(getSearchFields(request));
     }
-    /**
-     * Given a list of search fields buld a lucene search query string.
-     *
-     * @param fields The search fields
-     * @return A string
-     */
-    private String buildQuery(java.util.List<SearchField> fields)
-    {
-        Perl5Util util = new Perl5Util();
-        StringBuilder query = new StringBuilder();
-        query.append("(");
-        // Loop through the fields building the search query as we go.
-        for (SearchField field : fields)
-        {
-            // if the field is empty, then skip it and try a later one.
-            if (field.getQuery() == null)
-            {
-                continue;
-            }
-            // Add the conjunction for everything but the first field.
-            if (fields.indexOf(field) > 0)
-            {
-                query.append(" ").append(field.getConjunction()).append(" ").toString();
-            }
-            // Two cases, one if a specific search field is specified or if
-            // ANY is given then just a general search is performed.
-            if ("ANY".equals(field.getField()))
-            {
-                // No field specified,
-                query.append("(").append(field.getQuery()).append(")").toString();
-            }
-            else
-            {
-                // Specific search field specified, add the field specific field.
-                // Replace single quotes with double quotes (only if they match)
-                String subQuery = util.substitute("s/\'(.*)\'/\"$1\"/g", field.getQuery());
-                // If the field is not quoted ...
-                if (!util.match("/\".*\"/", subQuery))
-                {
-                    // ... then separate each word and re-specify the search field.
-                    subQuery = util.substitute("s/[ ]+/ " + field.getField() + ":/g", subQuery);
-                }
-                // Put the subQuery into the general query
-                query.append("(").append(field.getField()).append(":").append(subQuery).append(")").toString();
-            }
-        }
-        if (query.length() == 1)
-        {
-            return "";
-        }
-        return query.append(")").toString();
-    }
-    /**
-     * Get a list of search fields from the request object
-     * and parse them into a linear array of fileds. The field's
-     * index is preserved, so if it comes in as index 17 it will
-     * be outputted as field 17.
-     *
-     * @param request The http request object
-     * @return Array of search fields
-     * @throws UIException
-     */
-    public java.util.List<SearchField> getSearchFields(Request request) throws UIException
-    {
-        if (this.fields != null)
-        {
-            return this.fields;
-        }
-        // Get how many fields to search
-        int numSearchField;
-        try {
-            String numSearchFieldStr = request.getParameter("num_search_field");
-            numSearchField = Integer.valueOf(numSearchFieldStr);
-        }
-        catch (NumberFormatException nfe)
-        {
-            numSearchField = FIELD_MAX_COUNT;
-        }
-        // Iterate over all the possible fields and add each one to the list of fields.
-        ArrayList<SearchField> fields = new ArrayList<SearchField>();
-        for (int i = 1; i <= numSearchField; i++)
-        {
-            String field = request.getParameter("field"+i);
-            String query = decodeFromURL(request.getParameter("query"+i));
-            String conjunction = request.getParameter("conjunction"+i);
-            if (field != null)
-            {
-                field = field.trim();
-                if (field.length() == 0)
-                {
-                    field = null;
-                }
-            }
-            if (query != null)
-            {
-                query = query.trim();
-                if (query.length() == 0)
-                {
-                    query = null;
-                }
-            }
-            if (conjunction != null)
-            {
-                conjunction = conjunction.trim();
-                if (conjunction.length() == 0)
-                {
-                    conjunction = null;
-                }
-            }
-            if (field == null)
-            {
-                field = "ANY";
-            }
-            if (conjunction == null)
-            {
-                conjunction = "AND";
-            }
-            if (query != null)
-            {
-                fields.add(new SearchField(i, field, query, conjunction));
-            }
-        }
-        this.fields = fields;
-        return this.fields;
-    }
-    /**
-     * A private record keeping class to relate the various
-     * components of a search field together.
-     */
-    private static class SearchField {
-        /** What index the search field is, typically there are just three - but the theme may expand this number */
-        private int index;
-        /** The field to search, ANY if none specified */
-        private String field;
-        /** The query string to search for */
-        private String query;
-        /** the conjunction: either "AND" or "OR" */
-        private String conjuction;
-        public SearchField(int index, String field, String query, String conjunction)
-        {
-            this.index = index;
-            this.field = field;
-            this.query = query;
-            this.conjuction = conjunction;
-        }
-        public int getIndex() { return this.index;}
-        public String getField() { return this.field;}
-        public String getQuery() { return this.query;}
-        public String getConjunction() { return this.conjuction;}
-    }
+    private java.util.List<AdvancedSearchUtils.SearchField> getSearchFields(Request request) throws UIException {
+        if (fields == null){
+            fields = AdvancedSearchUtils.getSearchFields(request);
+        }
+        return fields;
+    }
 }


@@ -0,0 +1,44 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.artifactbrowser;
import org.apache.cocoon.environment.Request;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.xmlui.cocoon.SearchLoggerAction;
import org.dspace.app.xmlui.utils.UIException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Returns the query for the advanced search so our SearchLoggerAction can log this
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class AdvancedSearchLoggerAction extends SearchLoggerAction {
private static final Logger log = Logger.getLogger(SearchLoggerAction.class);
@Override
protected List<String> getQueries(Request request) {
try {
String advancedSearchQuery = AdvancedSearchUtils.buildQuery(AdvancedSearchUtils.getSearchFields(request));
if(!StringUtils.isBlank(advancedSearchQuery))
{
return Arrays.asList(advancedSearchQuery);
}
} catch (UIException e) {
log.error(e.getMessage(), e);
}
return new ArrayList<String>();
}
}


@@ -0,0 +1,233 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.artifactbrowser;
import org.apache.cocoon.environment.Request;
import org.apache.oro.text.perl.Perl5Util;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.core.Constants;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
/**
* Central place where advanced search queries can be built, since they are needed in several places
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class AdvancedSearchUtils {
private static final int FIELD_MAX_COUNT = 12;
/**
* Given a list of search fields build a lucene search query string.
*
* @param fields The search fields
* @return A string
*/
public static String buildQuery(java.util.List<SearchField> fields)
{
Perl5Util util = new Perl5Util();
StringBuilder query = new StringBuilder();
query.append("(");
// Loop through the fields building the search query as we go.
for (SearchField field : fields)
{
// if the field is empty, then skip it and try a later one.
if (field.getQuery() == null)
{
continue;
}
// Add the conjunction for everything but the first field.
if (fields.indexOf(field) > 0)
{
query.append(" ").append(field.getConjunction()).append(" ");
}
// Two cases, one if a specific search field is specified or if
// ANY is given then just a general search is performed.
if ("ANY".equals(field.getField()))
{
// No field specified,
query.append("(").append(field.getQuery()).append(")");
}
else
{
// Specific search field specified, add the field specific field.
// Replace single quotes with double quotes (only if they match)
String subQuery = util.substitute("s/\'(.*)\'/\"$1\"/g", field.getQuery());
// If the field is not quoted ...
if (!util.match("/\".*\"/", subQuery))
{
// ... then separate each word and re-specify the search field.
subQuery = util.substitute("s/[ ]+/ " + field.getField() + ":/g", subQuery);
}
// Put the subQuery into the general query
query.append("(").append(field.getField()).append(":").append(subQuery).append(")");
}
}
if (query.length() == 1)
{
return "";
}
return query.append(")").toString();
}
/**
* Get a list of search fields from the request object
* and parse them into a linear array of fields. The field's
* index is preserved, so if it comes in as index 17 it will
* be outputted as field 17.
*
* @param request The http request object
* @return Array of search fields
* @throws org.dspace.app.xmlui.utils.UIException
*/
public static java.util.List<SearchField> getSearchFields(Request request) throws UIException
{
// Get how many fields to search
int numSearchField;
try {
String numSearchFieldStr = request.getParameter("num_search_field");
numSearchField = Integer.valueOf(numSearchFieldStr);
}
catch (NumberFormatException nfe)
{
numSearchField = FIELD_MAX_COUNT;
}
// Iterate over all the possible fields and add each one to the list of fields.
ArrayList<SearchField> fields = new ArrayList<SearchField>();
for (int i = 1; i <= numSearchField; i++)
{
String field = request.getParameter("field"+i);
String query = decodeFromURL(request.getParameter("query"+i));
String conjunction = request.getParameter("conjunction"+i);
if (field != null)
{
field = field.trim();
if (field.length() == 0)
{
field = null;
}
}
if (query != null)
{
query = query.trim();
if (query.length() == 0)
{
query = null;
}
}
if (conjunction != null)
{
conjunction = conjunction.trim();
if (conjunction.length() == 0)
{
conjunction = null;
}
}
if (field == null)
{
field = "ANY";
}
if (conjunction == null)
{
conjunction = "AND";
}
if (query != null)
{
fields.add(new SearchField(i, field, query, conjunction));
}
}
return fields;
}
/**
* A private record keeping class to relate the various
* components of a search field together.
*/
public static class SearchField {
/** What index the search field is, typically there are just three - but the theme may expand this number */
private int index;
/** The field to search, ANY if none specified */
private String field;
/** The query string to search for */
private String query;
/** the conjunction: either "AND" or "OR" */
private String conjuction;
public SearchField(int index, String field, String query, String conjunction)
{
this.index = index;
this.field = field;
this.query = query;
this.conjuction = conjunction;
}
public int getIndex() { return this.index;}
public String getField() { return this.field;}
public String getQuery() { return this.query;}
public String getConjunction() { return this.conjuction;}
}
/**
* Decode the given string from URL transmission.
*
* @param encodedString
* The encoded string.
* @return The unencoded string
*/
private static String decodeFromURL(String encodedString) throws UIException
{
if (encodedString == null)
{
return null;
}
try
{
// Percent(%) is a special character, and must first be escaped as %25
if (encodedString.contains("%"))
{
encodedString = encodedString.replace("%", "%25");
}
return URLDecoder.decode(encodedString, Constants.DEFAULT_ENCODING);
}
catch (UnsupportedEncodingException uee)
{
throw new UIException(uee);
}
}
}

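As an illustration of what buildQuery(...) produces, here is a short sketch for two filled-in advanced-search rows, using the public SearchField class defined above:

    // Sketch: one field-specific row and one unqualified ("ANY") row.
    java.util.List<AdvancedSearchUtils.SearchField> fields = java.util.Arrays.asList(
            new AdvancedSearchUtils.SearchField(1, "title", "open access", "AND"),
            new AdvancedSearchUtils.SearchField(2, "ANY", "repositories", "AND"));
    String query = AdvancedSearchUtils.buildQuery(fields);
    // query is now "((title:open title:access) AND (repositories))"
    // unquoted multi-word input has the field prefix repeated per word,
    // while an "ANY" row is appended as a plain parenthesised clause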

@@ -0,0 +1,36 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.artifactbrowser;
import org.apache.cocoon.environment.Request;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.xmlui.cocoon.SearchLoggerAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Returns the query for a simple search so our SearchLoggerAction can log this
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class SimpleSearchLoggerAction extends SearchLoggerAction{
@Override
protected List<String> getQueries(Request request) {
String query = request.getParameter("query");
if(!StringUtils.isBlank(query)){
return Arrays.asList(request.getParameter("query"));
}else{
return new ArrayList<String>();
}
}
}


@@ -0,0 +1,155 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.statistics;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.*;
import org.dspace.statistics.Dataset;
import org.dspace.statistics.content.StatisticsTable;
import org.dspace.statistics.content.filter.StatisticsSolrDateFilter;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.sql.SQLException;
import java.text.ParseException;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public abstract class AbstractStatisticsDataTransformer extends AbstractDSpaceTransformer {
private static final Message T_time_filter_last_month = message("xmlui.statistics.StatisticsSearchTransformer.time-filter.last-month");
private static final Message T_time_filter_overall = message("xmlui.statistics.StatisticsSearchTransformer.time-filter.overall");
private static final Message T_time_filter_last_year = message("xmlui.statistics.StatisticsSearchTransformer.time-filter.last-year");
private static final Message T_time_filter_last6_months = message("xmlui.statistics.StatisticsSearchTransformer.time-filter.last-6-months");
protected void addTimeFilter(Division mainDivision) throws WingException {
Request request = ObjectModelHelper.getRequest(objectModel);
String selectedTimeFilter = request.getParameter("time_filter");
Select timeFilter = mainDivision.addPara().addSelect("time_filter");
timeFilter.addOption(StringUtils.equals(selectedTimeFilter, "-1"), "-1", T_time_filter_last_month);
timeFilter.addOption(StringUtils.equals(selectedTimeFilter, "-6"), "-6", T_time_filter_last6_months);
timeFilter.addOption(StringUtils.equals(selectedTimeFilter, "-12"), "-12", T_time_filter_last_year);
timeFilter.addOption(StringUtils.isBlank(selectedTimeFilter), "", T_time_filter_overall);
}
protected StatisticsSolrDateFilter getDateFilter(String timeFilter){
if(StringUtils.isNotEmpty(timeFilter))
{
StatisticsSolrDateFilter dateFilter = new StatisticsSolrDateFilter();
dateFilter.setStartStr(timeFilter);
dateFilter.setEndStr("0");
dateFilter.setTypeStr("month");
return dateFilter;
}else{
return null;
}
}
/**
* Adds a table layout to the page
*
* @param mainDiv
* the div to add the table to
* @param display the statistics table containing our data
* @throws org.xml.sax.SAXException
* @throws org.dspace.app.xmlui.wing.WingException
* @throws java.text.ParseException
* @throws java.io.IOException
* @throws org.apache.solr.client.solrj.SolrServerException
* @throws java.sql.SQLException
*/
protected void addDisplayTable(Division mainDiv, StatisticsTable display, boolean addRowTitles, String []valueMessagePrefixes)
throws SAXException, WingException, SQLException,
SolrServerException, IOException, ParseException {
String title = display.getTitle();
Dataset dataset = display.getDataset();
if (dataset == null)
{
/** activate dataset query */
dataset = display.getDataset(context);
}
if (dataset != null)
{
String[][] matrix = dataset.getMatrix();
if(matrix.length == 0){
//If no results are found alert the user of this !
mainDiv.addPara(getNoResultsMessage());
return;
}
/** Generate Table */
Division wrapper = mainDiv.addDivision("tablewrapper");
Table table = wrapper.addTable("list-table", 1, 1,
title == null ? "detailtable" : "tableWithTitle detailtable");
if (title != null)
{
table.setHead(message(title));
}
/** Generate Header Row */
Row headerRow = table.addRow();
if(addRowTitles)
{
headerRow.addCell("spacer", Cell.ROLE_HEADER, "labelcell");
}
String[] cLabels = dataset.getColLabels().toArray(new String[0]);
for (int row = 0; row < cLabels.length; row++)
{
Cell cell = headerRow.addCell(0 + "-" + row + "-h", Cell.ROLE_HEADER, "labelcell");
cell.addContent(message("xmlui.statistics.display.table.column-label." + cLabels[row]));
}
/** Generate Table Body */
for (int row = 0; row < matrix.length; row++) {
Row valListRow = table.addRow();
if(addRowTitles){
/** Add Row Title */
valListRow.addCell("" + row, Cell.ROLE_DATA, "labelcell")
.addContent(dataset.getRowLabels().get(row));
}
/** Add Rest of Row */
for (int col = 0; col < matrix[row].length; col++) {
Cell cell = valListRow.addCell(row + "-" + col,
Cell.ROLE_DATA, "datacell");
String messagePrefix = null;
if(valueMessagePrefixes != null && col < valueMessagePrefixes.length){
messagePrefix = valueMessagePrefixes[col];
}
if(messagePrefix != null){
cell.addContent(message(messagePrefix + matrix[row][col]));
}else{
cell.addContent(matrix[row][col]);
}
}
}
}
}
protected abstract Message getNoResultsMessage();
}

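Concrete transformers extend this class; the usual pattern is to read the time_filter parameter, turn it into a date filter and hand a table to addDisplayTable. A sketch, assuming the StatisticsTable constructor and addFilter method of the stock statistics-content package, with mainDivision and dso in scope:

    // Sketch inside a subclass's addBody(...): render a search-statistics table.
    Request request = ObjectModelHelper.getRequest(objectModel);
    StatisticsSolrDateFilter dateFilter = getDateFilter(request.getParameter("time_filter"));
    StatisticsTable statisticsTable = new StatisticsTable(new StatisticsDataSearches(dso));
    if (dateFilter != null) {
        statisticsTable.addFilter(dateFilter);
    }
    addDisplayTable(mainDivision, statisticsTable, true, null);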

@@ -15,10 +15,15 @@ import org.dspace.app.xmlui.wing.Message;
 import org.dspace.app.xmlui.utils.UIException;
 import org.dspace.app.xmlui.utils.HandleUtil;
 import org.dspace.authorize.AuthorizeException;
+import org.dspace.authorize.AuthorizeManager;
 import org.dspace.content.DSpaceObject;
 import org.apache.cocoon.caching.CacheableProcessingComponent;
 import org.apache.excalibur.source.SourceValidity;
 import org.apache.excalibur.source.impl.validity.NOPValidity;
+import org.dspace.core.Constants;
+import org.dspace.core.Context;
+import org.dspace.services.ConfigurationService;
+import org.dspace.utils.DSpace;
 import org.xml.sax.SAXException;
 import java.io.Serializable;
@@ -35,7 +40,9 @@ import java.sql.SQLException;
 public class Navigation extends AbstractDSpaceTransformer implements CacheableProcessingComponent {
     private static final Message T_statistics_head = message("xmlui.statistics.Navigation.title");
-    private static final Message T_statistics_view = message("xmlui.statistics.Navigation.view");
+    private static final Message T_statistics_usage_view = message("xmlui.statistics.Navigation.usage.view");
+    private static final Message T_statistics_search_view = message("xmlui.statistics.Navigation.search.view");
+    private static final Message T_statistics_workflow_view = message("xmlui.statistics.Navigation.workflow.view");
     public Serializable getKey() {
         //TODO: DO THIS
@@ -67,17 +74,46 @@ public class Navigation extends AbstractDSpaceTransformer implements CacheablePr
         List statistics = options.addList("statistics");
         DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
+        boolean displayUsageStats = displayStatsType(context, "usage", dso);
+        boolean displaySearchStats = displayStatsType(context, "search", dso);
+        boolean displayWorkflowStats = displayStatsType(context, "workflow", dso);
         if(dso != null && dso.getHandle() != null){
             statistics.setHead(T_statistics_head);
-            statistics.addItemXref(contextPath + "/handle/" + dso.getHandle() + "/statistics", T_statistics_view);
+            if(displayUsageStats){
+                statistics.addItemXref(contextPath + "/handle/" + dso.getHandle() + "/statistics", T_statistics_usage_view);
+            }
+            //Items cannot have search statistics
+            if(displaySearchStats && dso.getType() != Constants.ITEM){
+                statistics.addItemXref(contextPath + "/handle/" + dso.getHandle() + "/search-statistics", T_statistics_search_view);
+            }
+            //Items cannot have workflow statistics
+            if(displayWorkflowStats && dso.getType() != Constants.ITEM){
+                statistics.addItemXref(contextPath + "/handle/" + dso.getHandle() + "/workflow-statistics", T_statistics_workflow_view);
+            }
         }else{
             // This Navigation is only called either on a DSO related page, or the homepage
             // If on the home page: add statistics link for the home page
             statistics.setHead(T_statistics_head);
-            statistics.addItemXref(contextPath + "/statistics-home", T_statistics_view);
+            if(displayUsageStats){
+                statistics.addItemXref(contextPath + "/statistics-home", T_statistics_usage_view.parameterize());
+            }
+            if(displaySearchStats){
+                statistics.addItemXref(contextPath + "/search-statistics", T_statistics_search_view);
+            }
+            if(displayWorkflowStats){
+                statistics.addItemXref(contextPath + "/workflow-statistics", T_statistics_workflow_view);
+            }
         }
     }
+    protected boolean displayStatsType(Context context, String type, DSpaceObject dso) throws SQLException {
+        ConfigurationService cs = new DSpace().getConfigurationService();
+        return !cs.getPropertyAsType("usage-statistics.authorization.admin." + type, Boolean.TRUE) || AuthorizeManager.isAdmin(context, dso);
+    }
 }

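The three display* booleans above are purely configuration driven; the keys are composed as "usage-statistics.authorization.admin." + type. A config/modules/usage-statistics.cfg along these lines (a sketch inferred from the property names in the code) keeps usage statistics public while restricting search and workflow statistics to administrators:

    # [dspace]/config/modules/usage-statistics.cfg
    authorization.admin.usage = false
    authorization.admin.search = true
    authorization.admin.workflow = true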

@@ -0,0 +1,80 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.statistics;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.acting.AbstractAction;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Redirector;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.cocoon.environment.http.HttpEnvironment;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.xmlui.utils.ContextUtil;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.usage.UsageEvent;
import org.dspace.usage.UsageSearchEvent;
import org.dspace.utils.DSpace;
import javax.servlet.http.HttpServletResponse;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
/**
* Action that fires a search event & redirects the user to the view page of the object clicked on in the search results
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class SearchResultLogAction extends AbstractAction {
@Override
public Map act(Redirector redirector, SourceResolver resolver, Map objectModel, String source, Parameters parameters) throws Exception {
Request request = ObjectModelHelper.getRequest(objectModel);
Context context = ContextUtil.obtainContext(objectModel);
DSpaceObject scope = HandleUtil.obtainHandle(objectModel);
String redirectUrl = request.getParameter("redirectUrl");
String resultHandle = StringUtils.substringAfter(redirectUrl, "/handle/");
DSpaceObject result = HandleManager.resolveToObject(ContextUtil.obtainContext(request), resultHandle);
//Fire an event to log our search result
UsageSearchEvent searchEvent = new UsageSearchEvent(
UsageEvent.Action.SEARCH,
request,
context,
result,
Arrays.asList(request.getParameterValues("query")), scope);
if(!StringUtils.isBlank(request.getParameter("rpp"))){
searchEvent.setRpp(Integer.parseInt(request.getParameter("rpp")));
}
if(!StringUtils.isBlank(request.getParameter("sort_by"))){
searchEvent.setSortBy(request.getParameter("sort_by"));
}
if(!StringUtils.isBlank(request.getParameter("order"))){
searchEvent.setSortOrder(request.getParameter("order"));
}
if(!StringUtils.isBlank(request.getParameter("page"))){
searchEvent.setPage(Integer.parseInt(request.getParameter("page")));
}
new DSpace().getEventService().fireEvent(
searchEvent);
HttpServletResponse httpResponse = (HttpServletResponse) objectModel.get(HttpEnvironment.HTTP_RESPONSE_OBJECT);
httpResponse.sendRedirect(redirectUrl);
return new HashMap();
}
}

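The action above expects the clicked search result link to carry the original search state as request parameters. The parameter names come from the code; the mount point shown is hypothetical and depends on how the aspect sitemap wires the action in:

    /search-result-log?redirectUrl=/handle/123456789/42&query=dark+matter&rpp=10&sort_by=0&order=DESC&page=1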

@@ -1,95 +1,111 @@
 /**
  * The contents of this file are subject to the license and copyright
  * detailed in the LICENSE and NOTICE files at the root of the source
  * tree and available online at
  *
  * http://www.dspace.org/license/
  */
 package org.dspace.app.xmlui.aspect.statistics;

 import org.apache.cocoon.matching.Matcher;
 import org.apache.cocoon.sitemap.PatternException;
 import org.apache.avalon.framework.parameters.Parameters;
 import org.apache.avalon.framework.logger.AbstractLogEnabled;
 import org.dspace.core.Context;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Constants;
 import org.dspace.app.xmlui.utils.ContextUtil;
 import org.dspace.app.xmlui.utils.HandleUtil;
 import org.dspace.content.DSpaceObject;
 import org.dspace.authorize.AuthorizeManager;

 import java.util.Map;
 import java.util.HashMap;
 import java.sql.SQLException;

 /**
  * User: @author kevinvandevelde (kevin at atmire.com)
  * Date: 19-nov-2009
  * Time: 17:19:56
  */
 public class StatisticsAuthorizedMatcher extends AbstractLogEnabled implements Matcher{

     public Map match(String pattern, Map objectModel, Parameters parameters) throws PatternException {
+        String[] statisticsDisplayTypes = parameters.getParameter("type", "").split(",");
+
         // Are we checking for *NOT* the action or the action.
         boolean not = false;
         int action = Constants.READ; // the action to check

         if (pattern.startsWith("!"))
         {
             not = true;
             pattern = pattern.substring(1);
         }

         if(!pattern.equals("READ"))
         {
             getLogger().warn("Invalid action: '"+pattern+"'");
             return null;
         }

         try
         {
             Context context = ContextUtil.obtainContext(objectModel);
             DSpaceObject dso = HandleUtil.obtainHandle(objectModel);

             //We have always got rights to view stats on the home page (admin rights will be checked later)
             boolean authorized = dso == null || AuthorizeManager.authorizeActionBoolean(context, dso, action, false);
-            //If we are authorized check for any other authorization actions present
-            if(authorized && ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin"))
+            //Check if (one of our) display type is admin only
+            //If one of the given ones isn't admin only, no need to check !
+            boolean adminCheckNeeded = true;
+            for (String statisticsDisplayType : statisticsDisplayTypes) {
+                //Only usage statics are available on an item level
+                if(!"usage".equals(statisticsDisplayType) && dso != null && dso.getType() == Constants.ITEM){
+                    continue;
+                }
+                //If one isn't admin enabled no need to check for admin
+                if(!ConfigurationManager.getBooleanProperty("usage-statistics", "authorization.admin." + statisticsDisplayType, true)){
+                    adminCheckNeeded = false;
+                }
+            }
+
+            //If we are authorized check for any other authorization actions present
+            if(authorized && adminCheckNeeded)
             {
                 //If we have no user, we cannot be admin
                 if(context.getCurrentUser() == null)
                 {
                     authorized = false;
                 }

                 if(authorized){
                     //Check for admin
                     authorized = AuthorizeManager.isAdmin(context);
                     if(!authorized)
                     {
                         //Check if we have authorization for the owning colls, comms, ...
                         authorized = AuthorizeManager.isAdmin(context, dso);
                     }
                 }
             }

             // XOR
             if (not ^ authorized)
             {
                 return new HashMap();
             }
             else
             {
                 return null;
             }
         }
         catch (SQLException sqle)
         {
             throw new PatternException("Unable to obtain DSpace Context", sqle);
         }
     }
 }

View File

@@ -0,0 +1,114 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.statistics;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.xmlui.aspect.artifactbrowser.AdvancedSearchUtils;
import org.dspace.app.xmlui.cocoon.AbstractDSpaceTransformer;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.Body;
import org.dspace.app.xmlui.wing.element.Division;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.handle.HandleManager;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.sql.SQLException;
/**
 * Transformer that adds a hidden form
 * which will be submitted each time a DSpace object link is clicked on a Lucene search page.
 * This form ensures that the results clicked after each search are logged for the search statistics.
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class StatisticsSearchResultTransformer extends AbstractDSpaceTransformer {
@Override
public void addBody(Body body) throws SAXException, WingException, SQLException, IOException, AuthorizeException, ProcessingException {
Request request = ObjectModelHelper.getRequest(objectModel);
StringBuilder formUrl = new StringBuilder();
formUrl.append(request.getContextPath());
DSpaceObject scope = getScope();
if(scope != null){
formUrl.append("/handle/").append(scope.getHandle());
}
formUrl.append("/dso-display");
Division mainForm = body.addInteractiveDivision("dso-display", formUrl.toString(), Division.METHOD_POST, "");
mainForm.addHidden("query").setValue(getQuery());
if(!StringUtils.isBlank(request.getParameter("rpp"))){
mainForm.addHidden("rpp").setValue(Integer.parseInt(request.getParameter("rpp")));
}
if(!StringUtils.isBlank(request.getParameter("sort_by"))){
mainForm.addHidden("sort_by").setValue(request.getParameter("sort_by"));
}
if(!StringUtils.isBlank(request.getParameter("order"))){
mainForm.addHidden("order").setValue(request.getParameter("order"));
}
if(!StringUtils.isBlank(request.getParameter("page"))){
mainForm.addHidden("page").setValue(Integer.parseInt(request.getParameter("page")));
}
//This hidden input will contain the resulting url to which we redirect once our work has been completed
mainForm.addHidden("redirectUrl");
}
private String getQuery() throws UIException {
Request request = ObjectModelHelper.getRequest(objectModel);
if(parameters.getParameterAsBoolean("advanced-search", false)){
return AdvancedSearchUtils.buildQuery(AdvancedSearchUtils.getSearchFields(request));
}else{
String query = decodeFromURL(request.getParameter("query"));
if (query == null)
{
return "";
}
return query;
}
}
/**
 * Determine the current scope. This may be derived from the current URL
 * handle if present, or from the scope parameter if one is given. If no
 * scope is specified then null is returned.
*
* @return The current scope.
*/
protected DSpaceObject getScope() throws SQLException
{
Request request = ObjectModelHelper.getRequest(objectModel);
String scopeString = request.getParameter("scope");
// Are we in a community or collection?
DSpaceObject dso;
if (scopeString == null || "".equals(scopeString))
{
// get the search scope from the url handle
dso = HandleUtil.obtainHandle(objectModel);
}
else
{
// Get the search scope from the location parameter
dso = HandleManager.resolveToObject(context, scopeString);
}
return dso;
}
}

View File

@@ -0,0 +1,136 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.statistics;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.Body;
import org.dspace.app.xmlui.wing.element.Division;
import org.dspace.app.xmlui.wing.element.PageMeta;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.statistics.content.DatasetSearchGenerator;
import org.dspace.statistics.content.StatisticsDataSearches;
import org.dspace.statistics.content.StatisticsTable;
import org.dspace.statistics.content.filter.StatisticsSolrDateFilter;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.sql.SQLException;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class StatisticsSearchTransformer extends AbstractStatisticsDataTransformer {
private static final Message T_dspace_home = message("xmlui.general.dspace_home");
private static final Message T_search_terms_head = message("xmlui.statistics.StatisticsSearchTransformer.search-terms.head");
private static final Message T_search_total_head = message("xmlui.statistics.StatisticsSearchTransformer.search-total.head");
private static final Message T_trail = message("xmlui.statistics.trail-search");
private static final Message T_head_title = message("xmlui.statistics.search.title");
private static final Message T_retrieval_error = message("xmlui.statistics.search.error");
private static final Message T_search_head = message("xmlui.statistics.search.head");
private static final Message T_search_head_dso = message("xmlui.statistics.search.head-dso");
private static final Message T_no_results = message("xmlui.statistics.search.no-results");
/**
* Add a page title and trail links
*/
public void addPageMeta(PageMeta pageMeta) throws SAXException, WingException, SQLException, IOException, AuthorizeException {
//Try to find our dspace object
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
pageMeta.addTrailLink(contextPath + "/",T_dspace_home);
if(dso != null)
{
HandleUtil.buildHandleTrail(dso, pageMeta, contextPath);
}
pageMeta.addTrailLink(contextPath + (dso != null && dso.getHandle() != null ? "/handle/" + dso.getHandle() : "") + "/search-statistics", T_trail);
// Add the page title
pageMeta.addMetadata("title").addContent(T_head_title);
}
@Override
public void addBody(Body body) throws SAXException, WingException, SQLException, IOException, AuthorizeException, ProcessingException {
//Try to find our dspace object
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
Request request = ObjectModelHelper.getRequest(objectModel);
String selectedTimeFilter = request.getParameter("time_filter");
StringBuilder actionPath = new StringBuilder().append(request.getContextPath());
if(dso != null){
actionPath.append("/handle/").append(dso.getHandle());
}
actionPath.append("/search-statistics");
Division mainDivision = body.addInteractiveDivision("search-statistics", actionPath.toString(), Division.METHOD_POST, null);
if(dso != null){
mainDivision.setHead(T_search_head_dso.parameterize(dso.getName()));
}else{
mainDivision.setHead(T_search_head);
}
try {
//Add the time filter box
Division searchTermsDivision = mainDivision.addDivision("search-terms");
searchTermsDivision.setHead(T_search_terms_head);
addTimeFilter(searchTermsDivision);
//Retrieve the optional time filter
StatisticsSolrDateFilter dateFilter = getDateFilter(selectedTimeFilter);
StatisticsTable statisticsTable = new StatisticsTable(new StatisticsDataSearches(dso));
DatasetSearchGenerator queryGenerator = new DatasetSearchGenerator();
queryGenerator.setType("query");
queryGenerator.setMax(10);
queryGenerator.setMode(DatasetSearchGenerator.Mode.SEARCH_OVERVIEW);
queryGenerator.setPercentage(true);
queryGenerator.setRetrievePageViews(true);
statisticsTable.addDatasetGenerator(queryGenerator);
if(dateFilter != null){
statisticsTable.addFilter(dateFilter);
}
addDisplayTable(searchTermsDivision, statisticsTable, true, null);
Division totalDivision = mainDivision.addDivision("search-total");
totalDivision.setHead(T_search_total_head);
statisticsTable = new StatisticsTable(new StatisticsDataSearches(dso));
queryGenerator = new DatasetSearchGenerator();
queryGenerator.setMode(DatasetSearchGenerator.Mode.SEARCH_OVERVIEW_TOTAL);
queryGenerator.setPercentage(true);
queryGenerator.setRetrievePageViews(true);
statisticsTable.addDatasetGenerator(queryGenerator);
if(dateFilter != null){
statisticsTable.addFilter(dateFilter);
}
addDisplayTable(totalDivision, statisticsTable, false, null);
} catch (Exception e) {
mainDivision.addPara().addContent(T_retrieval_error);
}
}
@Override
protected Message getNoResultsMessage() {
return T_no_results;
}
}

View File

@@ -326,7 +326,7 @@ public class StatisticsTransformer extends AbstractDSpaceTransformer {
        if (dataset != null) {

-           String[][] matrix = dataset.getMatrixFormatted();
+           String[][] matrix = dataset.getMatrix();

            /** Generate Table */
            Division wrapper = mainDiv.addDivision("tablewrapper");
@@ -339,12 +339,12 @@ public class StatisticsTransformer extends AbstractDSpaceTransformer {

            /** Generate Header Row */
            Row headerRow = table.addRow();
-           headerRow.addCell("spacer", Cell.ROLE_DATA, "labelcell");
+           headerRow.addCell("spacer", Cell.ROLE_HEADER, "labelcell");

            String[] cLabels = dataset.getColLabels().toArray(new String[0]);
            for (int row = 0; row < cLabels.length; row++) {
                Cell cell = headerRow.addCell(0 + "-" + row + "-h",
-                       Cell.ROLE_DATA, "labelcell");
+                       Cell.ROLE_HEADER, "labelcell");
                cell.addContent(cLabels[row]);
            }
@@ -382,7 +382,7 @@ public class StatisticsTransformer extends AbstractDSpaceTransformer {
        if (dataset != null) {

-           String[][] matrix = dataset.getMatrixFormatted();
+           String[][] matrix = dataset.getMatrix();

            // String[] rLabels = dataset.getRowLabels().toArray(new String[0]);
@@ -395,9 +395,9 @@ public class StatisticsTransformer extends AbstractDSpaceTransformer {
            Row headerRow = table.addRow();

-           headerRow.addCell("", Cell.ROLE_DATA, "labelcell");
-           headerRow.addCell("", Cell.ROLE_DATA, "labelcell").addContent(message(T_head_visits_views));
+           headerRow.addCell("", Cell.ROLE_HEADER, "labelcell");
+           headerRow.addCell("", Cell.ROLE_HEADER, "labelcell").addContent(message(T_head_visits_views));

            /** Generate Table Body */
            for (int col = 0; col < matrix[0].length; col++) {

View File

@@ -0,0 +1,126 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.aspect.statistics;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Request;
import org.dspace.app.util.Util;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.app.xmlui.utils.UIException;
import org.dspace.app.xmlui.wing.Message;
import org.dspace.app.xmlui.wing.WingException;
import org.dspace.app.xmlui.wing.element.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.statistics.content.DatasetTypeGenerator;
import org.dspace.statistics.content.StatisticsDataWorkflow;
import org.dspace.statistics.content.StatisticsTable;
import org.dspace.statistics.content.filter.StatisticsSolrDateFilter;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.sql.SQLException;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public class StatisticsWorkflowTransformer extends AbstractStatisticsDataTransformer {
private static final Message T_dspace_home = message("xmlui.general.dspace_home");
private static final Message T_trail = message("xmlui.statistics.trail-workflow");
private static final Message T_head_title = message("xmlui.statistics.workflow.title");
private static final Message T_title = message("xmlui.statistics.workflow.title");
private static final Message T_retrieval_error = message("xmlui.statistics.workflow.error");
private static final Message T_no_results = message("xmlui.statistics.workflow.no-results");
private static final Message T_workflow_head = message("xmlui.statistics.workflow.head");
private static final Message T_workflow_head_dso = message("xmlui.statistics.workflow.head-dso");
/**
* Add a page title and trail links
*/
public void addPageMeta(PageMeta pageMeta) throws SAXException, WingException, UIException, SQLException, IOException, AuthorizeException {
//Try to find our dspace object
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
pageMeta.addTrailLink(contextPath + "/",T_dspace_home);
if(dso != null)
{
HandleUtil.buildHandleTrail(dso, pageMeta, contextPath);
}
pageMeta.addTrailLink(contextPath + (dso != null && dso.getHandle() != null ? "/handle/" + dso.getHandle() : "") + "/workflow-statistics", T_trail);
// Add the page title
pageMeta.addMetadata("title").addContent(T_head_title);
}
@Override
public void addBody(Body body) throws SAXException, WingException, SQLException, IOException, AuthorizeException, ProcessingException {
//Try to find our dspace object
DSpaceObject dso = HandleUtil.obtainHandle(objectModel);
Request request = ObjectModelHelper.getRequest(objectModel);
String selectedTimeFilter = request.getParameter("time_filter");
StringBuilder actionPath = new StringBuilder().append(contextPath);
if(dso != null){
actionPath.append("/handle/").append(dso.getHandle());
}
actionPath.append("/workflow-statistics");
Division mainDivision = body.addInteractiveDivision("workflow-statistics", actionPath.toString(), Division.METHOD_POST, null);
if(dso != null){
mainDivision.setHead(T_workflow_head_dso.parameterize(dso.getName()));
}else{
mainDivision.setHead(T_workflow_head);
}
try {
//Add the time filter box
Division workflowTermsDivision = mainDivision.addDivision("workflow-terms");
workflowTermsDivision.setHead(T_title);
addTimeFilter(workflowTermsDivision);
//Retrieve the optional time filter
StatisticsSolrDateFilter dateFilter = getDateFilter(selectedTimeFilter);
int time_filter = -1;
if(request.getParameter("time_filter") != null && !"".equals(request.getParameter("time_filter"))){
//Our time filter is a negative value if present
time_filter = Math.abs(Util.getIntParameter(request, "time_filter"));
}
StatisticsTable statisticsTable = new StatisticsTable(new StatisticsDataWorkflow(dso, time_filter));
DatasetTypeGenerator queryGenerator = new DatasetTypeGenerator();
//Set our type to previous workflow step (indicates our performed actions !)
queryGenerator.setType("previousWorkflowStep");
queryGenerator.setMax(10);
statisticsTable.addDatasetGenerator(queryGenerator);
if(dateFilter != null){
statisticsTable.addFilter(dateFilter);
}
addDisplayTable(workflowTermsDivision, statisticsTable, true, new String[]{"xmlui.statistics.display.table.workflow.step."});
} catch (Exception e) {
mainDivision.addPara().addContent(T_retrieval_error);
}
}
@Override
protected Message getNoResultsMessage() {
return T_no_results;
}
}

View File

@@ -0,0 +1,104 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.xmlui.cocoon;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.acting.AbstractAction;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Redirector;
import org.apache.cocoon.environment.Request;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.commons.lang.StringUtils;
import org.dspace.app.xmlui.utils.ContextUtil;
import org.dspace.app.xmlui.utils.HandleUtil;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.handle.HandleManager;
import org.dspace.usage.UsageEvent;
import org.dspace.usage.UsageSearchEvent;
import org.dspace.utils.DSpace;
import java.sql.SQLException;
import java.util.List;
import java.util.Map;
/**
* Fires an event each time a search occurs
*
* @author Kevin Van de Velde (kevin at atmire dot com)
* @author Ben Bosman (ben at atmire dot com)
* @author Mark Diggory (markd at atmire dot com)
*/
public abstract class SearchLoggerAction extends AbstractAction {
@Override
public Map act(Redirector redirector, SourceResolver resolver, Map objectModel, String source, Parameters parameters) throws Exception {
Request request = ObjectModelHelper.getRequest(objectModel);
Context context = ContextUtil.obtainContext(objectModel);
DSpaceObject scope = getScope(context, objectModel);
UsageSearchEvent searchEvent = new UsageSearchEvent(
UsageEvent.Action.SEARCH,
request,
context,
null, getQueries(request), scope);
if(!StringUtils.isBlank(request.getParameter("rpp"))){
searchEvent.setRpp(Integer.parseInt(request.getParameter("rpp")));
}
if(!StringUtils.isBlank(request.getParameter("sort_by"))){
searchEvent.setSortBy(request.getParameter("sort_by"));
}
if(!StringUtils.isBlank(request.getParameter("order"))){
searchEvent.setSortOrder(request.getParameter("order"));
}
if(!StringUtils.isBlank(request.getParameter("page"))){
searchEvent.setPage(Integer.parseInt(request.getParameter("page")));
}
//Fire our event
new DSpace().getEventService().fireEvent(searchEvent);
// Finished, allow to pass.
return null;
}
protected abstract List<String> getQueries(Request request) throws SQLException;
/**
 * Determine the current scope. This may be derived from the current URL
 * handle if present, or from the scope parameter if one is given. If no
 * scope is specified then null is returned.
*
* @return The current scope.
*/
protected DSpaceObject getScope(Context context, Map objectModel) throws SQLException
{
Request request = ObjectModelHelper.getRequest(objectModel);
String scopeString = request.getParameter("scope");
// Are we in a community or collection?
DSpaceObject dso;
if (scopeString == null || "".equals(scopeString))
{
// get the search scope from the url handle
dso = HandleUtil.obtainHandle(objectModel);
}
else
{
// Get the search scope from the location parameter
dso = HandleManager.resolveToObject(context, scopeString);
}
return dso;
}
}
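
The artifactbrowser sitemap below registers two concrete actions, SimpleSearchLoggerAction and AdvancedSearchLoggerAction, that extend this class; their bodies are not part of this hunk. A minimal sketch of what such a subclass might look like, assuming the simple search form submits its terms in a single "query" request parameter (a hypothetical reading, not necessarily the shipped implementation):

package org.dspace.app.xmlui.aspect.artifactbrowser;

import org.apache.cocoon.environment.Request;
import org.dspace.app.xmlui.cocoon.SearchLoggerAction;

import java.sql.SQLException;
import java.util.Collections;
import java.util.List;

/**
 * Hypothetical sketch of a simple-search logger: only getQueries() needs to be
 * supplied, the abstract SearchLoggerAction above fires the UsageSearchEvent.
 */
public class SimpleSearchLoggerAction extends SearchLoggerAction {

    @Override
    protected List<String> getQueries(Request request) throws SQLException {
        // Log the raw query string of the simple search form (empty when absent)
        String query = request.getParameter("query");
        return Collections.singletonList(query == null ? "" : query);
    }
}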

View File

@@ -51,6 +51,10 @@ and searching the repository.
        <map:selector name="IfModifiedSinceSelector"
                      src="org.dspace.app.xmlui.aspect.general.IfModifiedSinceSelector"/>
    </map:selectors>
+   <map:actions>
+       <map:action name="SimpleSearchLoggerAction" src="org.dspace.app.xmlui.aspect.artifactbrowser.SimpleSearchLoggerAction" />
+       <map:action name="AdvancedSearchLoggerAction" src="org.dspace.app.xmlui.aspect.artifactbrowser.AdvancedSearchLoggerAction" />
+   </map:actions>
</map:components>

<map:pipelines>
@@ -83,16 +87,19 @@ and searching the repository.

        <!-- Search -->
        <map:match pattern="search">
+           <map:act type="SimpleSearchLoggerAction"/>
            <map:transform type="SimpleSearch"/>
            <map:serialize type="xml"/>
        </map:match>

        <map:match pattern="simple-search">
+           <map:act type="SimpleSearchLoggerAction"/>
            <map:transform type="SimpleSearch"/>
            <map:serialize type="xml"/>
        </map:match>

        <map:match pattern="advanced-search">
+           <map:act type="AdvancedSearchLoggerAction"/>
            <map:transform type="AdvancedSearch"/>
            <map:serialize type="xml"/>
        </map:match>
@@ -119,20 +126,23 @@ and searching the repository.

            <!-- Simple search -->
            <map:match pattern="handle/*/*/search">
+               <map:act type="SimpleSearchLoggerAction"/>
                <map:transform type="SimpleSearch"/>
                <map:serialize type="xml"/>
            </map:match>

            <!-- Simple search again, but this time they said it -->
            <map:match pattern="handle/*/*/simple-search">
+               <map:act type="SimpleSearchLoggerAction"/>
                <map:transform type="SimpleSearch"/>
                <map:serialize type="xml"/>
            </map:match>

            <!-- Advanced search -->
            <map:match pattern="handle/*/*/advanced-search">
+               <map:act type="AdvancedSearchLoggerAction"/>
                <map:transform type="AdvancedSearch"/>
                <map:serialize type="xml"/>
            </map:match>

        </map:match> <!-- End match community or collection -->

View File

@@ -1,81 +1,289 @@
 <?xml version="1.0"?>
 <!--

     The contents of this file are subject to the license and copyright
     detailed in the LICENSE and NOTICE files at the root of the source
     tree and available online at

     http://www.dspace.org/license/

 -->
-<map:sitemap xmlns:map="http://apache.org/cocoon/sitemap/1.0">
-    <map:components>
-        <map:transformers>
-            <map:transformer name="StatisticsTransformer" src="org.dspace.app.xmlui.aspect.statistics.StatisticsTransformer"/>
-            <map:transformer name="Navigation" src="org.dspace.app.xmlui.aspect.statistics.Navigation"/>
-            <map:transformer name="RestrictedItem" src="org.dspace.app.xmlui.aspect.artifactbrowser.RestrictedItem"/>
-        </map:transformers>
-        <map:matchers default="wildcard">
-            <map:matcher name="HandleTypeMatcher" src="org.dspace.app.xmlui.aspect.general.HandleTypeMatcher"/>
-            <map:matcher name="StatisticsAuthorizedMatcher" src="org.dspace.app.xmlui.aspect.statistics.StatisticsAuthorizedMatcher"/>
-        </map:matchers>
-        <map:selectors>
-            <map:selector name="AuthenticatedSelector" src="org.dspace.app.xmlui.aspect.general.AuthenticatedSelector"/>
-        </map:selectors>
-    </map:components>
-
-    <map:pipelines>
-        <map:pipeline>
-
-            <map:generate/>
-
-            <!--Make sure that the home page also receives statistics-->
-            <map:match pattern="">
-                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
-                    <map:transform type="Navigation"/>
-                </map:match>
-            </map:match>
-
-            <!--Home page statistics-->
-            <map:match pattern="statistics-home">
-                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
-                    <map:transform type="StatisticsTransformer"/>
-                </map:match>
-            </map:match>
-
-            <!--Only show the stats link if we have read rights-->
-            <map:match pattern="handle/*/**">
-                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
-                    <map:transform type="Navigation"/>
-                </map:match>
-            </map:match>
-
-            <!--Match our statistics-->
-            <map:match pattern="handle/*/*/statistics">
-                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
-                    <map:transform type="StatisticsTransformer"/>
-                </map:match>
-
-                <map:match type="StatisticsAuthorizedMatcher" pattern="!READ">
-                    <map:select type="AuthenticatedSelector">
-                        <map:when test="eperson">
-                            <map:transform type="RestrictedItem"/>
-                            <map:serialize/>
-                        </map:when>
-                        <map:otherwise>
-                            <map:act type="StartAuthentication">
-                                <map:parameter name="header" value="xmlui.ArtifactBrowser.RestrictedItem.auth_header"/>
-                                <map:parameter name="message" value="xmlui.ArtifactBrowser.RestrictedItem.auth_message"/>
-                            </map:act>
-                            <map:serialize/>
-                        </map:otherwise>
-                    </map:select>
-                </map:match>
-            </map:match>
-
-            <map:serialize type="xml"/>
-
-        </map:pipeline>
-    </map:pipelines>
-</map:sitemap>
+<map:sitemap xmlns:map="http://apache.org/cocoon/sitemap/1.0">
+    <map:components>
+        <map:transformers>
+            <map:transformer name="StatisticsTransformer" src="org.dspace.app.xmlui.aspect.statistics.StatisticsTransformer"/>
+            <map:transformer name="Navigation" src="org.dspace.app.xmlui.aspect.statistics.Navigation"/>
+            <map:transformer name="RestrictedItem" src="org.dspace.app.xmlui.aspect.artifactbrowser.RestrictedItem"/>
+            <map:transformer name="StatisticsSearchResultTransformer" src="org.dspace.app.xmlui.aspect.statistics.StatisticsSearchResultTransformer"/>
+            <map:transformer name="StatisticsSearchTransformer" src="org.dspace.app.xmlui.aspect.statistics.StatisticsSearchTransformer"/>
+            <map:transformer name="StatisticsWorkflowTransformer" src="org.dspace.app.xmlui.aspect.statistics.StatisticsWorkflowTransformer"/>
+        </map:transformers>
+        <map:matchers default="wildcard">
+            <map:matcher name="HandleAuthorizedMatcher" src="org.dspace.app.xmlui.aspect.general.HandleAuthorizedMatcher"/>
+            <map:matcher name="HandleTypeMatcher" src="org.dspace.app.xmlui.aspect.general.HandleTypeMatcher"/>
+            <map:matcher name="StatisticsAuthorizedMatcher" src="org.dspace.app.xmlui.aspect.statistics.StatisticsAuthorizedMatcher"/>
+        </map:matchers>
+        <map:selectors>
+            <map:selector name="AuthenticatedSelector" src="org.dspace.app.xmlui.aspect.general.AuthenticatedSelector"/>
+        </map:selectors>
+        <map:actions>
+            <map:action name="SearchResultLogAction" src="org.dspace.app.xmlui.aspect.statistics.SearchResultLogAction" />
+        </map:actions>
+    </map:components>
+
+    <map:pipelines>
+        <map:pipeline>
+
+            <map:generate/>
+
+            <!--Make sure that the home page also receives statistics-->
+            <map:match pattern="">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="usage,search,workflow"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+            </map:match>
+
+            <!--Home page statistics-->
+            <map:match pattern="statistics-home">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="usage"/>
+                    <map:transform type="StatisticsTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+            </map:match>
+
+            <map:match pattern="search-statistics">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="search"/>
+                    <map:transform type="IncludePageMeta">
+                        <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                        <map:parameter name="javascript.static.statistics#2" value="static/js/usage-statistics.js"/>
+                    </map:transform>
+                    <map:transform type="StatisticsSearchTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+            </map:match>
+
+            <map:match pattern="workflow-statistics">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="workflow"/>
+                    <map:transform type="IncludePageMeta">
+                        <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                        <map:parameter name="javascript.static.statistics#2" value="static/js/usage-statistics.js"/>
+                    </map:transform>
+                    <map:transform type="StatisticsWorkflowTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+            </map:match>
+
+            <!--Only show the stats link if we have read rights-->
+            <map:match pattern="handle/*/*">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="usage,search,workflow"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+            </map:match>
+
+            <!--Match our statistics-->
+            <map:match pattern="handle/*/*/statistics">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="usage"/>
+                    <map:transform type="StatisticsTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+                <map:match type="StatisticsAuthorizedMatcher" pattern="!READ">
+                    <map:parameter name="type" value="usage"/>
+                    <map:select type="AuthenticatedSelector">
+                        <map:when test="eperson">
+                            <map:transform type="RestrictedItem"/>
+                            <map:serialize/>
+                        </map:when>
+                        <map:otherwise>
+                            <map:act type="StartAuthentication">
+                                <map:parameter name="header" value="xmlui.ArtifactBrowser.RestrictedItem.auth_header"/>
+                                <map:parameter name="message" value="xmlui.ArtifactBrowser.RestrictedItem.auth_message"/>
+                            </map:act>
+                            <map:serialize/>
+                        </map:otherwise>
+                    </map:select>
+                </map:match>
+            </map:match>
+
+            <!--Match for search statistics-->
+            <map:match pattern="handle/*/*/search-statistics">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="search"/>
+                    <map:transform type="IncludePageMeta">
+                        <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                        <map:parameter name="javascript.static.statistics#2" value="static/js/usage-statistics.js"/>
+                    </map:transform>
+                    <map:transform type="StatisticsSearchTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+                <map:match type="StatisticsAuthorizedMatcher" pattern="!READ">
+                    <map:parameter name="type" value="search"/>
+                    <map:select type="AuthenticatedSelector">
+                        <map:when test="eperson">
+                            <map:transform type="RestrictedItem"/>
+                            <map:serialize/>
+                        </map:when>
+                        <map:otherwise>
+                            <map:act type="StartAuthentication">
+                                <map:parameter name="header" value="xmlui.ArtifactBrowser.RestrictedItem.auth_header"/>
+                                <map:parameter name="message" value="xmlui.ArtifactBrowser.RestrictedItem.auth_message"/>
+                            </map:act>
+                            <map:serialize/>
+                        </map:otherwise>
+                    </map:select>
+                </map:match>
+            </map:match>
+
+            <map:match pattern="handle/*/*/workflow-statistics">
+                <map:match type="StatisticsAuthorizedMatcher" pattern="READ">
+                    <map:parameter name="type" value="workflow"/>
+                    <map:transform type="IncludePageMeta">
+                        <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                        <map:parameter name="javascript.static.statistics#2" value="static/js/usage-statistics.js"/>
+                    </map:transform>
+                    <map:transform type="StatisticsWorkflowTransformer"/>
+                    <map:transform type="Navigation"/>
+                </map:match>
+                <map:match type="StatisticsAuthorizedMatcher" pattern="!READ">
+                    <map:parameter name="type" value="workflow"/>
+                    <map:select type="AuthenticatedSelector">
+                        <map:when test="eperson">
+                            <map:transform type="RestrictedItem"/>
+                            <map:serialize/>
+                        </map:when>
+                        <map:otherwise>
+                            <map:act type="StartAuthentication">
+                                <map:parameter name="header" value="xmlui.ArtifactBrowser.RestrictedItem.auth_header"/>
+                                <map:parameter name="message" value="xmlui.ArtifactBrowser.RestrictedItem.auth_message"/>
+                            </map:act>
+                            <map:serialize/>
+                        </map:otherwise>
+                    </map:select>
+                </map:match>
+            </map:match>
+
+            <map:match pattern="dso-display">
+                <map:act type="SearchResultLogAction"/>
+            </map:match>
+
+            <map:match pattern="search">
+                <map:transform type="IncludePageMeta">
+                    <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                    <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                </map:transform>
+                <map:transform type="StatisticsSearchResultTransformer">
+                    <map:parameter name="advanced-search" value="false"/>
+                </map:transform>
+            </map:match>
+
+            <map:match pattern="simple-search">
+                <map:transform type="IncludePageMeta">
+                    <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                    <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                </map:transform>
+                <map:transform type="StatisticsSearchResultTransformer">
+                    <map:parameter name="advanced-search" value="false"/>
+                </map:transform>
+            </map:match>
+
+            <map:match pattern="advanced-search">
+                <map:transform type="IncludePageMeta">
+                    <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                    <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                </map:transform>
+                <map:transform type="StatisticsSearchResultTransformer">
+                    <map:parameter name="advanced-search" value="true"/>
+                </map:transform>
+            </map:match>
+
+            <map:match pattern="discover">
+                <map:transform type="IncludePageMeta">
+                    <map:parameter name="javascript.static.statistics#100" value="static/js/discovery/discovery-results.js"/>
+                </map:transform>
+            </map:match>
+
+            <!-- Handle specific features -->
+            <map:match pattern="handle/*/**">
+                <!-- Scoped browse by features -->
+                <map:match type="HandleAuthorizedMatcher" pattern="READ">
+                    <map:match type="HandleTypeMatcher" pattern="community,collection">
+
+                        <map:match pattern="handle/*/*/dso-display">
+                            <map:act type="SearchResultLogAction"/>
+                        </map:match>
+
+                        <!-- Simple search -->
+                        <map:match pattern="handle/*/*/search">
+                            <map:transform type="IncludePageMeta">
+                                <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                                <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                            </map:transform>
+                            <map:transform type="StatisticsSearchResultTransformer">
+                                <map:parameter name="advanced-search" value="false"/>
+                            </map:transform>
+                        </map:match>
+
+                        <!-- Simple search again, but this time they said it -->
+                        <map:match pattern="handle/*/*/simple-search">
+                            <map:transform type="IncludePageMeta">
+                                <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                                <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                            </map:transform>
+                            <map:transform type="StatisticsSearchResultTransformer">
+                                <map:parameter name="advanced-search" value="false"/>
+                            </map:transform>
+                        </map:match>
+
+                        <!-- Advanced search -->
+                        <map:match pattern="handle/*/*/advanced-search">
+                            <map:transform type="IncludePageMeta">
+                                <map:parameter name="javascript.static.statistics#1" value="loadJQuery.js"/>
+                                <map:parameter name="javascript.static.statistics#2" value="static/js/search-results.js"/>
+                            </map:transform>
+                            <map:transform type="StatisticsSearchResultTransformer">
+                                <map:parameter name="advanced-search" value="true"/>
+                            </map:transform>
+                        </map:match>
+
+                        <!--Optional discovery statistics-->
+                        <map:match pattern="handle/*/*/discover">
+                            <map:transform type="IncludePageMeta">
+                                <map:parameter name="javascript.static.statistics#100" value="static/js/discovery/discovery-results.js"/>
+                            </map:transform>
+                        </map:match>
+
+                    </map:match> <!-- End match community or collection -->
+                </map:match> <!-- End match can read -->
+            </map:match> <!-- End match handle/*/** -->
+
+            <map:serialize type="xml"/>
+
+        </map:pipeline>
+    </map:pipelines>
+</map:sitemap>

View File

@@ -1898,6 +1898,12 @@
         !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!-->

    <message key="xmlui.statistics.title">Statistics</message>
+   <message key="xmlui.statistics.search.title">Search Statistics</message>
+   <message key="xmlui.statistics.search.head">Search Statistics</message>
+   <message key="xmlui.statistics.search.head-dso">Search Statistics for {0}</message>
+   <message key="xmlui.statistics.search.error">There was an error while generating the search statistics, please try again later.</message>
+   <message key="xmlui.statistics.search.no-results">No search statistics available for the selected period.</message>
+   <message key="xmlui.statistics.workflow.title">Workflow Statistics</message>
    <message key="xmlui.statistics.visits.total">Total Visits</message>
    <message key="xmlui.statistics.visits.month">Total Visits Per Month</message>
    <message key="xmlui.statistics.visits.views">Views</message>
@@ -1905,8 +1911,53 @@
    <message key="xmlui.statistics.visits.cities">Top cities views</message>
    <message key="xmlui.statistics.visits.bitstreams">File Visits</message>
    <message key="xmlui.statistics.Navigation.title">Statistics</message>
-   <message key="xmlui.statistics.Navigation.view">View Statistics</message>
+   <message key="xmlui.statistics.Navigation.usage.view">View Usage Statistics</message>
+   <message key="xmlui.statistics.Navigation.search.view">View Search Statistics</message>
+   <message key="xmlui.statistics.Navigation.workflow.view">View Workflow Statistics</message>
    <message key="xmlui.statistics.trail">Statistics</message>
+   <message key="xmlui.statistics.trail-search">Search Statistics</message>
+   <message key="xmlui.statistics.trail-workflow">Workflow Statistics</message>
+   <message key="xmlui.statistics.workflow.no-results">No workflow statistics available for the selected period.</message>
+   <message key="xmlui.statistics.workflow.error">There was an error while generating the workflow statistics, please try again later.</message>
+   <message key="xmlui.statistics.workflow.head">Workflow Statistics</message>
+   <message key="xmlui.statistics.workflow.head-dso">Workflow Statistics for {0}</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.search-terms.head">Top Search Terms</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.search-total.head">Total</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.time-filter.last-month">Previous month</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.time-filter.last-6-months">Previous 6 months</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.time-filter.last-year">Previous year</message>
+   <message key="xmlui.statistics.StatisticsSearchTransformer.time-filter.overall">Overall</message>
+   <message key="xmlui.statistics.display.table.column-label.search-terms">Search Term</message>
+   <message key="xmlui.statistics.display.table.column-label.searches">Searches</message>
+   <message key="xmlui.statistics.display.table.column-label.percent-total">% of Total</message>
+   <message key="xmlui.statistics.display.table.column-label.views-search">Pageviews / Search</message>
+   <message key="xmlui.statistics.display.table.column-label.step">Step</message>
+   <message key="xmlui.statistics.display.table.column-label.performed">Performed</message>
+   <message key="xmlui.statistics.display.table.column-label.average">Average</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP1POOL">Accept/Reject Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP1">Accept/Reject Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP2POOL">Accept/Reject/Edit Metadata Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP2">Accept/Reject/Edit Metadata Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP3POOL">Edit Metadata Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.STEP3">Edit Metadata Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.reviewstep.claimaction">Accept/Reject Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.reviewstep.reviewaction">Accept/Reject Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.editstep.claimaction">Accept/Reject/Edit Metadata Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.editstep.editaction">Accept/Reject/Edit Metadata Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.finaleditstep.claimaction">Edit Metadata Step Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.default.finaleditstep.finaleditaction">Edit Metadata Step</message>
+   <message key="xmlui.statistics.display.table.workflow.step.scoreReview.scoreReviewStep.claimaction">Score Review Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.scoreReview.scoreReviewStep.scorereviewaction">Score Review</message>
+   <message key="xmlui.statistics.display.table.workflow.step.scoreReview.evaluationStep.evaluationaction">Score Review Evaluation</message>
+   <message key="xmlui.statistics.display.table.workflow.step.scoreReview.evaluationStep.noUserSelectionAction">Score Review Evaluation Configuration</message>
+   <message key="xmlui.statistics.display.table.workflow.step.selectSingleReviewer.selectReviewerStep.claimaction">Single User Review Pool</message>
+   <message key="xmlui.statistics.display.table.workflow.step.selectSingleReviewer.singleUserReviewStep.autoassignAction">Single User Auto Assign Action</message>
+   <message key="xmlui.statistics.display.table.workflow.step.selectSingleReviewer.singleUserReviewStep.singleuserreviewaction">Single User Review Action</message>

View File

@@ -0,0 +1,32 @@
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
(function ($) {
/**
* Function ensures that all the links clicked in our results pass through the internal logging mechanism
*/
$(document).ready(function() {
//Retrieve all links with handles attached (comm/coll/item links)
var urls = $('div#aspect_artifactbrowser_SimpleSearch_div_search-results,' +
'div#aspect_artifactbrowser_AdvancedSearch_div_search-results').find('a');
urls.click(function(){
var $this = $(this);
//Instead of redirecting us to the page, first send us to the statistics logger
//By doing this we ensure that we register the query to the result
var form = $('form#aspect_statistics_StatisticsSearchResultTransformer_div_dso-display');
form.find('input[name="redirectUrl"]').val($this.attr('href'));
form.submit();
return false;
});
});
})(jQuery);

View File

@@ -0,0 +1,20 @@
/*
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
(function ($) {
/**
* Function ensures that when a new time filter is selected the form is submitted
*/
$(document).ready(function() {
$('select[name="time_filter"]').change(function(){
$(this).parents('form:first').submit();
});
});
})(jQuery);

View File

@@ -1237,6 +1237,20 @@ div.vocabulary-container li.error{
    border: 1px solid lightgray;
}

+table.detailtable {
+    background-color: #D8E8EB;
+}
+
+table.detailtable th{
+    background-color: #F0F2F5;
+    word-wrap: normal;
+}
+
+table.detailtable td{
+    background-color: #FFFFFF;
+    text-align: right;
+}
+
span.highlight{
    font-weight: bold;

View File

@@ -13,7 +13,12 @@ resolver.timeout = 200
# view the statistics.
# If disabled, anyone with READ permissions on the DSpaceObject will be able
# to view the statistics.
-authorization.admin=true
+#View/download statistics
+authorization.admin.usage=true
+#Search/search result statistics
+authorization.admin.search=true
+#Workflow result statistics
+authorization.admin.workflow=true

# Enable/disable logging of spiders in solr statistics.
# If false, and IP matches an address in spiderips.urls, event is not logged.
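
These per-display-type keys are read through the module-scoped ConfigurationManager lookup shown in the StatisticsAuthorizedMatcher hunk above, where a missing key defaults to true (admin-only). A minimal sketch of that lookup; the helper class name and the example argument are illustrative only, not part of the commit:

import org.dspace.core.ConfigurationManager;

public class StatisticsAuthorizationConfig {

    /**
     * Hypothetical helper mirroring the matcher's lookup: "usage-statistics"
     * names the module (this configuration file), the key is
     * authorization.admin.<displayType>, and absent keys default to true so
     * the corresponding statistics page stays admin-only.
     */
    public static boolean isAdminOnly(String displayType) {
        return ConfigurationManager.getBooleanProperty(
                "usage-statistics", "authorization.admin." + displayType, true);
    }
}

With the settings above, isAdminOnly("search") would return true until authorization.admin.search is set to false.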

View File

@@ -21,6 +21,27 @@
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
+               <!--Upgraded version required since we need to use the exclude option !-->
+               <version>2.1-alpha-2</version>
+               <configuration>
+                   <!-- This 'dspace-solr' WAR overlays the Apache Solr Web Application
+                        available in Maven Central -->
+                   <overlays>
+                       <overlay>
+                           <groupId>org.apache.solr</groupId>
+                           <artifactId>solr</artifactId>
+                           <!--
+                               Exclude the solr core named apache-solr-core, this is needed because the dspace-solr changes
+                               need to take precedence over the solr-core, the solr-core will still be loaded in the solr-core.jar
+                           -->
+                           <excludes>
+                               <exclude>WEB-INF/lib/apache-solr-core-3.5.0.jar</exclude>
+                               <!--Also ensure we use the DSpace solr web.xml file else our localhost filter will not work !-->
+                               <exclude>WEB-INF/web.xml</exclude>
+                           </excludes>
+                       </overlay>
+                   </overlays>
+               </configuration>
                <executions>
                    <execution>
                        <phase>prepare-package</phase>
@@ -33,7 +54,7 @@
        <dependency>
            <groupId>org.dspace</groupId>
            <artifactId>dspace-solr</artifactId>
-           <version>3.3.0.0</version>
+           <version>3.5.0.1</version>
            <classifier>skinny</classifier>
            <type>war</type>
        </dependency>
@@ -41,24 +62,11 @@
        <dependency>
            <groupId>org.dspace</groupId>
            <artifactId>dspace-solr</artifactId>
-           <version>3.3.0.0</version>
+           <version>3.5.0.1</version>
            <classifier>classes</classifier>
            <type>jar</type>
        </dependency>

-       <dependency>
-           <groupId>org.slf4j</groupId>
-           <artifactId>slf4j-api</artifactId>
-           <version>1.5.6</version>
-       </dependency>
-
-       <dependency>
-           <groupId>org.slf4j</groupId>
-           <artifactId>slf4j-jdk14</artifactId>
-           <version>1.5.6</version>
-       </dependency>
-
        <!-- support odd cases where JAXP cannot be found in JVM -->
        <dependency>
            <groupId>xalan</groupId>

View File

@@ -32,6 +32,15 @@
--> -->
<abortOnConfigurationError>${solr.abortOnConfigurationError:true}</abortOnConfigurationError> <abortOnConfigurationError>${solr.abortOnConfigurationError:true}</abortOnConfigurationError>
<!-- Controls what version of Lucene various components of Solr
adhere to. Generally, you want to use the latest version to
get all bug fixes and improvements. It is highly recommended
that you fully re-index after changing this setting as it can
affect both how text is indexed and queried.
-->
<luceneMatchVersion>LUCENE_35</luceneMatchVersion>
<!-- lib directives can be used to instruct Solr to load an Jars identified <!-- lib directives can be used to instruct Solr to load an Jars identified
and use them to resolve any "plugins" specified in your solrconfig.xml or and use them to resolve any "plugins" specified in your solrconfig.xml or
schema.xml (ie: Analyzers, Request Handlers, etc...). schema.xml (ie: Analyzers, Request Handlers, etc...).
@@ -47,14 +56,21 @@
classpath, this is useful for including all jars in a directory. classpath, this is useful for including all jars in a directory.
--> -->
<lib dir="../../contrib/extraction/lib" /> <lib dir="../../contrib/extraction/lib" />
<!-- When a regex is specified in addition to a directory, only the files in that <lib dir="../../contrib/clustering/lib/" />
directory which completely match the regex (anchored on both ends) <lib dir="../../contrib/velocity/lib" />
will be included.
<!-- When a regex is specified in addition to a directory, only the
files in that directory which completely match the regex
(anchored on both ends) will be included.
--> -->
<lib dir="../../dist/" regex="apache-solr-cell-\d.*\.jar" /> <lib dir="../../dist/" regex="apache-solr-cell-\d.*\.jar" />
<lib dir="../../dist/" regex="apache-solr-clustering-\d.*\.jar" /> <lib dir="../../dist/" regex="apache-solr-clustering-\d.*\.jar" />
<!-- If a dir option (with or without a regex) is used and nothing is found <lib dir="../../dist/" regex="apache-solr-dataimporthandler-\d.*\.jar" />
that matches, it will be ignored <lib dir="../../dist/" regex="apache-solr-langid-\d.*\.jar" />
<lib dir="../../dist/" regex="apache-solr-velocity-\d.*\.jar" />
<!-- If a dir option (with or without a regex) is used and nothing
is found that matches, it will be ignored
--> -->
<lib dir="../../contrib/clustering/lib/downloads/" /> <lib dir="../../contrib/clustering/lib/downloads/" />
<lib dir="../../contrib/clustering/lib/" /> <lib dir="../../contrib/clustering/lib/" />
@@ -71,9 +87,29 @@
<!--<dataDir>${solr.data.dir:./solr/data}</dataDir>--> <!--<dataDir>${solr.data.dir:./solr/data}</dataDir>-->
<!-- WARNING: this <indexDefaults> section only provides defaults for index writers <!-- The DirectoryFactory to use for indexes.
in general. See also the <mainIndex> section after that when changing parameters
for Solr's main Lucene index. --> solr.StandardDirectoryFactory, the default, is filesystem
based and tries to pick the best implementation for the current
JVM and platform. One can force a particular implementation
via solr.MMapDirectoryFactory, solr.NIOFSDirectoryFactory, or
solr.SimpleFSDirectoryFactory.
solr.RAMDirectoryFactory is memory based, not
persistent, and doesn't work with replication.
-->
<directoryFactory name="DirectoryFactory"
class="${solr.directoryFactory:solr.StandardDirectoryFactory}"/>
<!-- Index Defaults
Values here affect all index writers and act as a default
unless overridden.
WARNING: See also the <mainIndex> section below for parameters
that overfor Solr's main Lucene index.
-->
<indexDefaults> <indexDefaults>
<!-- Values here affect all index writers and act as a default unless overridden. --> <!-- Values here affect all index writers and act as a default unless overridden. -->
<useCompoundFile>false</useCompoundFile> <useCompoundFile>false</useCompoundFile>
@@ -87,74 +123,86 @@
        for buffering added documents and deletions before they are
        flushed to the Directory. -->
    <ramBufferSizeMB>32</ramBufferSizeMB>

    <!-- If both ramBufferSizeMB and maxBufferedDocs is set, then
         Lucene will flush based on whichever limit is hit first.
      -->
    <!-- <maxBufferedDocs>1000</maxBufferedDocs> -->

    <maxFieldLength>10000</maxFieldLength>
    <writeLockTimeout>1000</writeLockTimeout>
    <commitLockTimeout>10000</commitLockTimeout>

    <!-- Expert: Merge Policy

         The Merge Policy in Lucene controls how merging is handled by
         Lucene. The default in Solr 3.3 is TieredMergePolicy.

         The default in 2.3 was the LogByteSizeMergePolicy,
         previous versions used LogDocMergePolicy.

         LogByteSizeMergePolicy chooses segments to merge based on
         their size. The Lucene 2.2 default, LogDocMergePolicy, chose
         when to merge based on number of documents.

         Other implementations of MergePolicy must have a no-argument
         constructor.
      -->
    <!--
       <mergePolicy class="org.apache.lucene.index.TieredMergePolicy"/>
      -->

    <!-- Expert: Merge Scheduler

         The Merge Scheduler in Lucene controls how merges are
         performed. The ConcurrentMergeScheduler (Lucene 2.3 default)
         can perform merges in the background using separate threads.
         The SerialMergeScheduler (Lucene 2.2 default) does not.
      -->
    <!--
       <mergeScheduler class="org.apache.lucene.index.ConcurrentMergeScheduler"/>
      -->

    <!-- LockFactory

         This option specifies which Lucene LockFactory implementation
         to use.

         single = SingleInstanceLockFactory - suggested for a
                  read-only index or when there is no possibility of
                  another process trying to modify the index.
         native = NativeFSLockFactory - uses OS native file locking.
                  Do not use when multiple solr webapps in the same
                  JVM are attempting to share a single index.
         simple = SimpleFSLockFactory - uses a plain file for locking

         (For backwards compatibility with Solr 1.2, 'simple' is the default
         if not specified.)

         More details on the nuances of each LockFactory...
         http://wiki.apache.org/lucene-java/AvailableLockFactories
      -->
    <lockType>native</lockType>

    <!-- Expert: Controls how often Lucene loads terms into memory.
         Default is 128 and is likely good for most everyone.
      -->
    <!--<termIndexInterval>256</termIndexInterval>-->
  </indexDefaults>
<!-- Main Index
Values here override the values in the <indexDefaults> section
for the main on disk index.
-->
  <mainIndex>
    <!-- options specific to the main on-disk lucene index -->
    <useCompoundFile>false</useCompoundFile>
    <ramBufferSizeMB>32</ramBufferSizeMB>
    <mergeFactor>10</mergeFactor>

    <!-- Deprecated -->
    <!--<maxBufferedDocs>1000</maxBufferedDocs>-->
    <!--<maxMergeDocs>2147483647</maxMergeDocs>-->

    <!-- Unlock On Startup

         If true, unlock any held write or commit locks on startup.
         This defeats the locking mechanism that allows multiple
         processes to safely access a lucene index, and should be
         used with care.
@@ -166,16 +214,12 @@
         of closed and then opened. -->
    <reopenReaders>true</reopenReaders>

    <!-- Commit Deletion Policy

         Custom deletion policies can be specified here. The class must
         implement org.apache.lucene.index.IndexDeletionPolicy.

         http://lucene.apache.org/java/2_9_1/api/all/org/apache/lucene/index/IndexDeletionPolicy.html

         The standard Solr IndexDeletionPolicy implementation supports deleting
         index commit points on number of commits, age of commit point and
@@ -198,39 +242,55 @@
      -->
    </deletionPolicy>

    <!-- Lucene Infostream

         To aid in advanced debugging, Lucene provides an "InfoStream"
         of detailed information when indexing.

         Setting the value to true will instruct the underlying Lucene
         IndexWriter to write its debugging info to the specified file.
      -->
    <infoStream file="INFOSTREAM.txt">false</infoStream>
  </mainIndex>

  <!-- JMX

       This example enables JMX if and only if an existing MBeanServer
       is found, use this if you want to configure JMX through JVM
       parameters. Remove this to disable exposing Solr configuration
       and statistics to JMX.

       For more details see http://wiki.apache.org/solr/SolrJmx
    -->
  <jmx />
  <!-- If you want to connect to a particular server, specify the
       agentId
    -->
  <!-- <jmx agentId="myAgent" /> -->
  <!-- If you want to start a new MBeanServer, specify the serviceUrl -->
  <!-- <jmx serviceUrl="service:jmx:rmi:///jndi/rmi://localhost:9999/solr"/>
    -->

  <!-- The default high-performance update handler -->
  <updateHandler class="solr.DirectUpdateHandler2">
    <!-- A prefix of "solr." for class names is an alias that
         causes solr to search appropriate packages, including
         org.apache.solr.(search|update|request|core|analysis)
      -->

    <!-- AutoCommit

         Perform a <commit/> automatically under certain conditions.
         Instead of enabling autoCommit, consider using "commitWithin"
         when adding documents.

         http://wiki.apache.org/solr/UpdateXmlMessages

         maxDocs - Maximum number of documents to add since the last
                   commit before automatically triggering a new commit.

         maxTime - Maximum amount of time that is allowed to pass
                   since a document was added before automatically
                   triggering a new commit.
      -->
    <!--
     <autoCommit>
       <maxDocs>10000</maxDocs>
       <maxTime>1000</maxTime>
@@ -241,16 +301,29 @@
       <maxTime>10000</maxTime> <!--Commit every 10 seconds-->
     </autoCommit>

    <!-- Update Related Event Listeners

         Various IndexWriter related events can trigger Listeners to
         take actions.

         postCommit - fired after every commit or optimize command
         postOptimize - fired after every optimize command
      -->

    <!-- The RunExecutableListener executes an external command from a
         hook such as postCommit or postOptimize.

         exe - the name of the executable to run
         dir - dir to use as the current working directory. (default=".")
         wait - the calling thread waits until the executable returns.
                (default="true")
         args - the arguments to pass to the program. (default is none)
         env - environment variables to set. (default is none)
      -->
    <!-- This example shows how RunExecutableListener could be used
         with the script based replication...
         http://wiki.apache.org/solr/CollectionDistribution
      -->
    <!--
       <listener event="postCommit" class="solr.RunExecutableListener">
         <str name="exe">solr/bin/snapshooter</str>
         <str name="dir">.</str>
@@ -259,64 +332,89 @@
         <arr name="env"> <str>MYVAR=val1</str> </arr>
       </listener>
      -->
    <!-- A postOptimize event is fired only after every optimize command
       <listener event="postOptimize" class="solr.RunExecutableListener">
         <str name="exe">snapshooter</str>
         <str name="dir">solr/bin</str>
         <bool name="wait">true</bool>
       </listener>
      -->
  </updateHandler>
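For clients that add documents to this core, the "commitWithin" alternative mentioned above is set per update request rather than in solrconfig.xml. A minimal SolrJ sketch, assuming SolrJ 3.x; the core URL and field values are illustrative, and this is not the DSpace indexing code itself:

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.request.UpdateRequest;
import org.apache.solr.common.SolrInputDocument;

public class CommitWithinExample
{
    public static void main(String[] args) throws Exception
    {
        // Assumed URL of the statistics core; adjust for your deployment.
        SolrServer solr = new CommonsHttpSolrServer("http://localhost:8080/solr/statistics");

        SolrInputDocument doc = new SolrInputDocument();
        doc.addField("type", 2);                    // illustrative values only
        doc.addField("id", 123);
        doc.addField("time", new java.util.Date());

        // Ask Solr to make this update visible within 10 seconds,
        // instead of relying on an explicit commit or on autoCommit.
        UpdateRequest request = new UpdateRequest();
        request.add(doc);
        request.setCommitWithin(10000);
        request.process(solr);
    }
}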
  <!-- IndexReaderFactory

       Use the following format to specify a custom IndexReaderFactory,
       which allows for alternate IndexReader implementations.

       ** Experimental Feature **

       Please note - Using a custom IndexReaderFactory may prevent
       certain other features from working. The API to
       IndexReaderFactory may change without warning or may even be
       removed from future releases if the problems cannot be
       resolved.

       ** Features that may not work with custom IndexReaderFactory **

       The ReplicationHandler assumes a disk-resident index. Using a
       custom IndexReader implementation may cause incompatibility
       with ReplicationHandler and may cause replication to not work
       correctly. See SOLR-1366 for details.
    -->
  <!--
  <indexReaderFactory name="IndexReaderFactory" class="package.class">
    <str name="someArg">Some Value</str>
  </indexReaderFactory >
  -->
  <!-- By explicitly declaring the Factory, the termIndexDivisor can
       be specified.
    -->
  <!--
     <indexReaderFactory name="IndexReaderFactory"
                         class="solr.StandardIndexReaderFactory">
       <int name="setTermIndexDivisor">12</int>
     </indexReaderFactory >
    -->

  <query>
    <!-- Max Boolean Clauses

         Maximum number of clauses in each BooleanQuery, an exception
         is thrown if exceeded.

         ** WARNING **

         This option actually modifies a global Lucene property that
         will affect all SolrCores. If multiple solrconfig.xml files
         disagree on this property, the value at any given moment will
         be based on the last SolrCore to be initialized.
      -->
    <maxBooleanClauses>1024</maxBooleanClauses>
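The warning above exists because maxBooleanClauses is applied through a static, JVM-wide Lucene setting, so the last core to initialize wins. A minimal sketch of the underlying Lucene 3.x call, shown only to illustrate that the setting is global:

import org.apache.lucene.search.BooleanQuery;

public class MaxClauseCountExample
{
    public static void main(String[] args)
    {
        // maxClauseCount is a static field shared by every BooleanQuery
        // in the JVM, which is why all SolrCores see the same value.
        BooleanQuery.setMaxClauseCount(1024);
        System.out.println("maxClauseCount = " + BooleanQuery.getMaxClauseCount());
    }
}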
    <!-- Solr Internal Query Caches

         There are two implementations of cache available for Solr,
         LRUCache, based on a synchronized LinkedHashMap, and
         FastLRUCache, based on a ConcurrentHashMap.

         FastLRUCache has faster gets and slower puts in single
         threaded operation and thus is generally faster than LRUCache
         when the hit ratio of the cache is high (> 75%), and may be
         faster under other scenarios on multi-cpu systems.
      -->

    <!-- Filter Cache

         Cache used by SolrIndexSearcher for filters (DocSets),
         unordered sets of *all* documents that match a query.

         When a new searcher is opened, its caches may be prepopulated
         or "autowarmed" using data from caches in the old searcher.
         autowarmCount is the number of items to prepopulate. For LRUCache,
         the autowarmed items will be the most recently accessed items.

         Parameters:
           class - the SolrCache implementation
                   (LRUCache or FastLRUCache)
           size - the maximum number of entries in the cache
           initialSize - the initial capacity (number of entries) of
                   the cache. (see java.util.HashMap)
           autowarmCount - the number of entries to prepopulate from
                   an old cache.
      -->
@@ -326,65 +424,91 @@
                 initialSize="512"
                 autowarmCount="0"/>

    <!-- Query Result Cache

         Caches results of searches - ordered lists of document ids
         (DocList) based on a query, a sort, and the range of documents
         requested.
      -->
    <queryResultCache class="solr.LRUCache"
                      size="512"
                      initialSize="512"
                      autowarmCount="0"/>

    <!-- Document Cache

         Caches Lucene Document objects (the stored fields for each
         document). Since Lucene internal document ids are transient,
         this cache will not be autowarmed.
      -->
    <documentCache
      class="solr.LRUCache"
      size="512"
      initialSize="512"
      autowarmCount="0"/>

    <!-- Field Value Cache

         Cache used to hold field values that are quickly accessible
         by document id. The fieldValueCache is created by default
         even if not configured here.
      -->
    <!--
       <fieldValueCache class="solr.FastLRUCache"
                        size="512"
                        autowarmCount="128"
                        showItems="32" />
      -->

    <!-- Custom Cache

         Example of a generic cache. These caches may be accessed by
         name through SolrIndexSearcher.getCache(), cacheLookup(), and
         cacheInsert(). The purpose is to enable easy caching of
         user/application level data. The regenerator argument should
         be specified as an implementation of solr.CacheRegenerator
         if autowarming is desired.
      -->
    <!--
       <cache name="myUserCache"
              class="solr.LRUCache"
              size="4096"
              initialSize="1024"
              autowarmCount="1024"
              regenerator="com.mycompany.MyRegenerator"
              />
      -->

    <!-- Lazy Field Loading

         If true, stored fields that are not requested will be loaded
         lazily. This can result in a significant speed improvement
         if the usual case is to not load all stored fields,
         especially if the skipped fields are large compressed text
         fields.
      -->
    <enableLazyFieldLoading>true</enableLazyFieldLoading>

    <!-- Use Filter For Sorted Query

         A possible optimization that attempts to use a filter to
         satisfy a search. If the requested sort does not include
         score, then the filterCache will be checked for a filter
         matching the query. If found, the filter will be used as the
         source of document ids, and then the sort will be applied to
         that.

         For most situations, this will not be useful unless you
         frequently get the same search repeatedly with different sort
         options, and none of them ever use "score"
      -->
    <!--
       <useFilterForSortedQuery>true</useFilterForSortedQuery>
      -->

    <!-- Result Window Size

         An optimization for use with the queryResultCache. When a search
         is requested, a superset of the requested number of document ids
         are collected. For example, if a search for a particular query
         requests matching documents 10 through 19, and queryWindowSize is 50,
@@ -396,10 +520,21 @@
         queryResultCache. -->
    <queryResultMaxDocsCached>200</queryResultMaxDocsCached>

    <!-- Query Related Event Listeners

         Various IndexSearcher related events can trigger Listeners to
         take actions.

         newSearcher - fired whenever a new searcher is being prepared
         and there is a current searcher handling requests (aka
         registered). It can be used to prime certain caches to
         prevent long request times for certain requests.

         firstSearcher - fired whenever a new searcher is being
         prepared but there is no current registered searcher to handle
         requests or to gain autowarming data from.
      -->
    <!-- QuerySenderListener takes an array of NamedList and executes a
         local query request for each NamedList in sequence. -->
@@ -1027,7 +1162,7 @@
  <!-- config for the admin interface -->
  <admin>
    <defaultQuery>solr</defaultQuery> <defaultQuery>*:*</defaultQuery>
    <!-- configure a healthcheck file for servers behind a loadbalancer
    <healthcheck type="file">server-enabled</healthcheck>
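The admin default query is now the match-all query "*:*". A hedged SolrJ sketch of the same match-all query, handy as a quick check that the core is reachable and populated; the core URL is an assumption:

import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.client.solrj.response.QueryResponse;

public class MatchAllExample
{
    public static void main(String[] args) throws Exception
    {
        // Assumed URL of the statistics core; adjust for your deployment.
        SolrServer solr = new CommonsHttpSolrServer("http://localhost:8080/solr/statistics");

        // "*:*" matches every document; rows=0 returns only the count.
        SolrQuery query = new SolrQuery("*:*");
        query.setRows(0);

        QueryResponse response = solr.query(query);
        System.out.println("Documents in core: " + response.getResults().getNumFound());
    }
}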
View File
@@ -255,6 +255,7 @@
    -->
    <fieldtype name="ignored" stored="false" indexed="false" class="solr.StrField" />
    <fieldType name="uuid" class="solr.UUIDField" indexed="true" />
 </types>
@@ -277,8 +278,8 @@
      best performance.
   -->
<field name="type" type="integer" indexed="true" stored="true" required="true" /> <field name="type" type="integer" indexed="true" stored="true" required="false" />
<field name="id" type="integer" indexed="true" stored="true" required="true" /> <field name="id" type="integer" indexed="true" stored="true" required="false" />
   <field name="ip" type="string" indexed="true" stored="true" required="false" />
   <field name="time" type="date" indexed="true" stored="true" required="true" />
   <field name="epersonid" type="integer" indexed="true" stored="true" required="false" />
@@ -286,8 +287,8 @@
   <field name="country" type="string" indexed="true" stored="true" required="false" />
   <field name="countryCode" type="string" indexed="true" stored="true" required="false"/>
   <field name="city" type="string" indexed="true" stored="true" required="false"/>
   <field name="longitude" type="float" indexed="true" stored="true" required="false" />
   <field name="latitude" type="float" indexed="true" stored="true" required="false" />
   <field name="owningComm" type="integer" indexed="true" stored="true" required="false" multiValued="true" />
   <field name="owningColl" type="integer" indexed="true" stored="true" required="false" multiValued="true" />
   <field name="owningItem" type="integer" indexed="true" stored="true" required="false" multiValued="true" />
@@ -295,9 +296,33 @@
   <field name="userAgent" type="string" indexed="true" stored="true" required="false"/>
   <field name="isBot" type="boolean" indexed="true" stored="true" required="false"/>
   <field name="bundleName" type="string" indexed="true" stored="true" required="false" multiValued="true" />
<field name="referrer" type="string" indexed="true" stored="true" required="false"/>
<field name="uid" type="uuid" indexed="true" stored="true" default="NEW" />
<!--Can either be view/search/search_result/workflow-->
<field name="statistics_type" type="string" indexed="true" stored="true" required="true" default="view" />
<!-- Search specific -->
<field name="query" type="string" indexed="true" stored="true" required="false" multiValued="true"/>
<field name="scopeType" type="integer" indexed="true" stored="true" required="false" />
<field name="scopeId" type="integer" indexed="true" stored="true" required="false" />
<field name="rpp" type="integer" indexed="true" stored="true" required="false" />
<field name="sortBy" type="string" indexed="true" stored="true" required="false" />
<field name="sortOrder" type="string" indexed="true" stored="true" required="false" />
<field name="page" type="integer" indexed="true" stored="true" required="false" />
   <!--Workflow specific-->
<field name="workflowStep" type="string" indexed="true" stored="true" required="false" multiValued="true"/>
<field name="previousWorkflowStep" type="string" indexed="true" stored="true" required="false" multiValued="true"/>
<field name="owner" type="string" indexed="true" stored="true" required="false" multiValued="true"/>
<field name="submitter" type="integer" indexed="true" stored="true" required="false" />
<field name="actor" type="integer" indexed="true" stored="true" required="false" />
<field name="workflowItemId" type="integer" indexed="true" stored="true" required="false" />
 </fields>
<uniqueKey>uid</uniqueKey>
 <!-- field for the QueryParser to use when an explicit fieldname is absent -->
 <defaultSearchField>id</defaultSearchField>
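The new fields above let one core record view, search, search_result and workflow events, distinguished by statistics_type, with uid as the unique key; its "NEW" default lets Solr's UUIDField mint a UUID whenever the client omits the field. A minimal SolrJ sketch of a search-event document, assuming an illustrative core URL and field values (this is not the actual DSpace SolrLogger code):

import java.util.Date;

import org.apache.solr.client.solrj.SolrServer;
import org.apache.solr.client.solrj.impl.CommonsHttpSolrServer;
import org.apache.solr.common.SolrInputDocument;

public class SearchEventExample
{
    public static void main(String[] args) throws Exception
    {
        // Assumed URL of the statistics core; adjust for your deployment.
        SolrServer solr = new CommonsHttpSolrServer("http://localhost:8080/solr/statistics");

        SolrInputDocument doc = new SolrInputDocument();
        // "uid" is omitted on purpose: the schema default of "NEW"
        // makes the UUIDField generate a fresh UUID for this document.
        doc.addField("statistics_type", "search");
        doc.addField("time", new Date());
        doc.addField("ip", "127.0.0.1");          // illustrative values only
        doc.addField("query", "dark matter");
        doc.addField("scopeType", 3);             // illustrative DSpace object type constant
        doc.addField("scopeId", 17);
        doc.addField("rpp", 10);
        doc.addField("sortBy", "score");
        doc.addField("sortOrder", "desc");
        doc.addField("page", 1);

        solr.add(doc);
        solr.commit();
    }
}

A workflow event would be stored the same way, using statistics_type "workflow" and the workflowStep, previousWorkflowStep, owner, submitter, actor and workflowItemId fields instead of the search-specific ones.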
View File
@@ -694,7 +694,7 @@
      <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
<version>1.4</version> <version>2.3</version>
      </dependency>
      <dependency>
         <groupId>commons-lang</groupId>