Merge branch 'add-edismax-parser-support_contribute-7.6'

# Conflicts:
#	dspace-api/src/main/java/org/dspace/statistics/SolrLoggerServiceImpl.java

Author: Alexandre Vryghem
Date:   2025-03-17 23:42:01 +01:00
10 changed files with 16 additions and 16 deletions
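
A note on the pattern repeated in every hunk below: with the edismax parser, a space after the field colon (e.g. `withdrawn: false`) is generally not treated as a single field clause, and the value may instead be matched against the default query fields. Dropping the space keeps the field and value bound together under both the classic and edismax parsers. A minimal SolrJ sketch of the resulting query style; the `defType=edismax` setting and the standalone class are illustrative assumptions, not code from this commit:

```java
import org.apache.solr.client.solrj.SolrQuery;

public class EdismaxQueryExample {
    public static SolrQuery buildQuery() {
        SolrQuery solrQuery = new SolrQuery();
        // Illustrative assumption: the request handler uses the edismax parser.
        solrQuery.set("defType", "edismax");
        solrQuery.setQuery("*:*");
        // No whitespace after the colon, so the parser keeps field and value in one clause.
        solrQuery.addFilterQuery("withdrawn:false");
        solrQuery.addFilterQuery("archived:true");
        return solrQuery;
    }
}
```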

@@ -1231,8 +1231,8 @@ public class LogAnalyser {
}
accessionedQuery.append("]");
discoverQuery.addFilterQueries(accessionedQuery.toString());
discoverQuery.addFilterQueries("withdrawn: false");
discoverQuery.addFilterQueries("archived: true");
discoverQuery.addFilterQueries("withdrawn:false");
discoverQuery.addFilterQueries("archived:true");
return (int) SearchUtils.getSearchService().search(context, discoverQuery).getTotalSearchResults();
}

@@ -362,9 +362,9 @@ public class SolrBrowseDAO implements BrowseDAO {
}
if (isAscending) {
query.setQuery("bi_" + column + "_sort" + ": [* TO \"" + value + "\"}");
query.setQuery("bi_" + column + "_sort" + ":[* TO \"" + value + "\"}");
} else {
query.setQuery("bi_" + column + "_sort" + ": {\"" + value + "\" TO *]");
query.setQuery("bi_" + column + "_sort" + ":{\"" + value + "\" TO *]");
query.addFilterQueries("-(bi_" + column + "_sort" + ":" + value + "*)");
}
DiscoverResult resp = null;

@@ -1266,7 +1266,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
try {
SolrQuery solrQuery = new SolrQuery();
//Set the query to handle since this is unique
- solrQuery.setQuery(SearchUtils.RESOURCE_UNIQUE_ID + ": " + new IndexableItem(item).getUniqueIndexID());
+ solrQuery.setQuery(SearchUtils.RESOURCE_UNIQUE_ID + ":" + new IndexableItem(item).getUniqueIndexID());
//Only return obj identifier fields in result doc
solrQuery.setFields(SearchUtils.RESOURCE_TYPE_FIELD, SearchUtils.RESOURCE_ID_FIELD);
//Add the more like this parameters !

@@ -117,9 +117,9 @@ public class Harvest {
}
if (!withdrawn) {
discoverQuery.addFilterQueries("archived: true OR withdrawn: false");
discoverQuery.addFilterQueries("archived:true OR withdrawn:false");
} else {
discoverQuery.addFilterQueries("archived: true OR withdrawn: true");
discoverQuery.addFilterQueries("archived:true OR withdrawn:true");
}
// Order by item ID, so that for a given harvest the order will be

@@ -93,7 +93,7 @@ public class StatisticsLoggingConsumer implements Consumer {
// We are mapping a new item make sure that the owning collection is
// updated
Item newItem = (Item) event.getObject(ctx);
String updateQuery = "id: " + newItem.getID() + " AND type:"
String updateQuery = "id:" + newItem.getID() + " AND type:"
+ newItem.getType();
List<String> fieldNames = new ArrayList<String>();
@@ -116,7 +116,7 @@ public class StatisticsLoggingConsumer implements Consumer {
&& event.getObject(ctx) instanceof Item) {
// Unmapping items
Item newItem = (Item) event.getObject(ctx);
String updateQuery = "id: " + newItem.getID() + " AND type:"
String updateQuery = "id:" + newItem.getID() + " AND type:"
+ newItem.getType();
List<String> fieldNames = new ArrayList<String>();

@@ -68,10 +68,10 @@ public class StatisticsBSAdapter {
switch (visitType) {
case ITEM_VISITS:
return solrLoggerService
.queryTotal("type: " + Constants.ITEM + " AND id: " + item.getID(), resolveFilterQueries(), 0)
.queryTotal("type:" + Constants.ITEM + " AND id:" + item.getID(), resolveFilterQueries(), 0)
.getCount();
case BITSTREAM_VISITS:
- return solrLoggerService.queryTotal("type: " + Constants.BITSTREAM + " AND owningItem: " + item.getID(),
+ return solrLoggerService.queryTotal("type:" + Constants.BITSTREAM + " AND owningItem:" + item.getID(),
resolveFilterQueries(), 0).getCount();
case TOTAL_VISITS:
return getNumberOfVisits(ITEM_VISITS, item) + getNumberOfVisits(BITSTREAM_VISITS, item);

@@ -209,7 +209,7 @@ public class StatisticsDataSearches extends StatisticsData {
protected String getQuery() {
String query;
if (currentDso != null) {
query = "scopeType: " + currentDso.getType() + " AND ";
query = "scopeType:" + currentDso.getType() + " AND ";
if (currentDso instanceof DSpaceObjectLegacySupport) {
query += " (scopeId:" + currentDso.getID() + " OR scopeId:" + ((DSpaceObjectLegacySupport) currentDso)
.getLegacyId() + ")";

@@ -237,7 +237,7 @@ public class StatisticsDataVisits extends StatisticsData {
false, null, facetMinCount);
for (int j = 0; j < maxObjectCounts.length; j++) {
ObjectCount firstCount = maxObjectCounts[j];
- String newQuery = dataSetQuery.getFacetField() + ": " + ClientUtils
+ String newQuery = dataSetQuery.getFacetField() + ":" + ClientUtils
.escapeQueryChars(firstCount.getValue()) + " AND " + query;
ObjectCount[] maxDateFacetCounts = solrLoggerService
.queryFacetDate(newQuery, filterQuery, dataSetQuery.getMax(), dateFacet.getDateType(),
@@ -813,7 +813,7 @@ public class StatisticsDataVisits extends StatisticsData {
String query = "";
//Check (& add if needed) the dsoType
if (dsoType != -1) {
query += "type: " + dsoType;
query += "type:" + dsoType;
}
//Check (& add if needed) the dsoId

@@ -146,7 +146,7 @@ public class StatisticsDataWorkflow extends StatisticsData {
*/
protected String getQuery() {
String query = "statistics_type:" + SolrLoggerServiceImpl.StatisticsType.WORKFLOW.text();
query += " AND NOT(previousWorkflowStep: SUBMIT)";
query += " AND NOT(previousWorkflowStep:SUBMIT)";
if (currentDso != null) {
if (currentDso.getType() == Constants.COMMUNITY) {
query += " AND owningComm:";

@@ -131,7 +131,7 @@ public class MetadataExportSearchIT extends AbstractIntegrationTestWithDatabase
checkItemsPresentInFile(filename, itemsSubject1);
- result = runDSpaceScript("metadata-export-search", "-q", "subject: " + subject2, "-n", filename);
+ result = runDSpaceScript("metadata-export-search", "-q", "subject:" + subject2, "-n", filename);
assertEquals(0, result);
checkItemsPresentInFile(filename, itemsSubject2);