Merge pull request #3265 from 4Science/CST-4121-MakeDiscoveryMoreResilientToStaleObjects

Make discovery more resilient to stale objects
This commit is contained in:
Tim Donohue
2021-07-12 08:46:46 -05:00
committed by GitHub
4 changed files with 454 additions and 66 deletions
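In outline, the change wraps result loading in a bounded retry loop: whenever a Solr hit cannot be resolved to a database object, its unique id is collected, the stale documents are deleted from the index, and the query is re-run, up to a configurable number of attempts (discovery.removestale.attempts). A simplified sketch of that control flow follows; loadResults is a placeholder for the per-document processing that the real retrieveResult method below performs inline:

    int maxAttempts = configurationService.getIntProperty("discovery.removestale.attempts", 3);
    boolean valid = false;
    int executionCount = 0;
    DiscoverResult result = null;
    do {
        executionCount++;
        QueryResponse response = solrSearchCore.getSolr().query(solrQuery, solrSearchCore.REQUEST_METHOD);
        // collect the unique ids of hits that no longer resolve to a database object
        List<String> zombieDocs = new ArrayList<>();
        result = loadResults(context, query, response, zombieDocs); // placeholder, not the real API
        if (!zombieDocs.isEmpty()) {
            solrSearchCore.getSolr().deleteById(zombieDocs); // purge the stale documents
            solrSearchCore.getSolr().commit();
        } else {
            valid = true; // no stale hits: the results are usable
        }
    } while (!valid && executionCount <= maxAttempts);

The real method additionally honours discovery.removestale.attempts = -1, in which case stale hits are only logged and never collected or purged.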

View File

@@ -719,12 +719,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
if (solrSearchCore.getSolr() == null) {
return new DiscoverResult();
}
SolrQuery solrQuery = resolveToSolrQuery(context, discoveryQuery);
QueryResponse queryResponse = solrSearchCore.getSolr().query(solrQuery,
solrSearchCore.REQUEST_METHOD);
return retrieveResult(context, discoveryQuery, queryResponse);
return retrieveResult(context, discoveryQuery);
} catch (Exception e) {
throw new org.dspace.discovery.SearchServiceException(e.getMessage(), e);
@@ -750,6 +746,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// Also ensure a few key obj identifier fields are returned with every query
solrQuery.addField(SearchUtils.RESOURCE_TYPE_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_ID_FIELD);
solrQuery.addField(SearchUtils.RESOURCE_UNIQUE_ID);
if (discoveryQuery.isSpellCheck()) {
solrQuery.setParam(SpellingParams.SPELLCHECK_Q, query);
@@ -861,65 +858,120 @@ public class SolrServiceImpl implements SearchService, IndexingService {
return solrQuery;
}
protected DiscoverResult retrieveResult(Context context, DiscoverQuery query, QueryResponse solrQueryResponse)
throws SQLException {
DiscoverResult result = new DiscoverResult();
protected DiscoverResult retrieveResult(Context context, DiscoverQuery query)
throws SQLException, SolrServerException, IOException, SearchServiceException {
// valid and executionCount decide whether the Solr query needs to be re-run when stale objects are found
boolean valid = false;
int executionCount = 0;
DiscoverResult result = null;
SolrQuery solrQuery = resolveToSolrQuery(context, query);
// how many re-runs of the query are allowed in addition to the first run
int maxAttempts = configurationService.getIntProperty("discovery.removestale.attempts", 3);
do {
executionCount++;
result = new DiscoverResult();
// if stale objects are found, we may skip loading the rest of the response to improve performance
boolean skipLoadingResponse = false;
// use zombieDocs to collect the stale objects that are found
List<String> zombieDocs = new ArrayList<String>();
QueryResponse solrQueryResponse = solrSearchCore.getSolr().query(solrQuery,
solrSearchCore.REQUEST_METHOD);
if (solrQueryResponse != null) {
result.setSearchTime(solrQueryResponse.getQTime());
result.setStart(query.getStart());
result.setMaxResults(query.getMaxResults());
result.setTotalSearchResults(solrQueryResponse.getResults().getNumFound());
if (solrQueryResponse != null) {
result.setSearchTime(solrQueryResponse.getQTime());
result.setStart(query.getStart());
result.setMaxResults(query.getMaxResults());
result.setTotalSearchResults(solrQueryResponse.getResults().getNumFound());
List<String> searchFields = query.getSearchFields();
for (SolrDocument doc : solrQueryResponse.getResults()) {
IndexableObject indexableObject = findIndexableObject(context, doc);
List<String> searchFields = query.getSearchFields();
for (SolrDocument doc : solrQueryResponse.getResults()) {
IndexableObject indexableObject = findIndexableObject(context, doc);
if (indexableObject != null) {
result.addIndexableObject(indexableObject);
} else {
log.error(LogManager.getHeader(context,
"Error while retrieving DSpace object from discovery index",
"Unique identifier: " + doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)));
continue;
}
DiscoverResult.SearchDocument resultDoc = new DiscoverResult.SearchDocument();
//Add information about our search fields
for (String field : searchFields) {
List<String> valuesAsString = new ArrayList<>();
for (Object o : doc.getFieldValues(field)) {
valuesAsString.add(String.valueOf(o));
}
resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()]));
}
result.addSearchDocument(indexableObject, resultDoc);
if (solrQueryResponse.getHighlighting() != null) {
Map<String, List<String>> highlightedFields = solrQueryResponse.getHighlighting().get(
indexableObject.getUniqueIndexID());
if (MapUtils.isNotEmpty(highlightedFields)) {
//We need to remove all the "_hl" appendix strings from our keys
Map<String, List<String>> resultMap = new HashMap<>();
for (String key : highlightedFields.keySet()) {
List<String> highlightOriginalValue = highlightedFields.get(key);
List<String[]> resultHighlightOriginalValue = new ArrayList<>();
for (String highlightValue : highlightOriginalValue) {
String[] splitted = highlightValue.split("###");
resultHighlightOriginalValue.add(splitted);
}
resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key));
if (indexableObject != null) {
result.addIndexableObject(indexableObject);
} else {
// logged at WARN level because we attempt to fix the issue below
log.warn(LogManager.getHeader(context,
"Stale entry found in Discovery index,"
+ " as we could not find the DSpace object it refers to. ",
"Unique identifier: " + doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID)));
// Enables Solr to remove documents that refer to objects no longer in the database (stale entries)
// if maxAttempts is zero or greater, clean up the index on each attempt
if (maxAttempts >= 0) {
zombieDocs.add((String) doc.getFirstValue(SearchUtils.RESOURCE_UNIQUE_ID));
// avoid processing the response unless this is the last allowed execution.
// When maxAttempts is 0 the first run is also the last: executionCount is
// incremented at the start of the loop, so it already equals 1 here
skipLoadingResponse = maxAttempts + 1 != executionCount;
}
continue;
}
if (!skipLoadingResponse) {
DiscoverResult.SearchDocument resultDoc = new DiscoverResult.SearchDocument();
// Add information about our search fields
for (String field : searchFields) {
List<String> valuesAsString = new ArrayList<>();
for (Object o : doc.getFieldValues(field)) {
valuesAsString.add(String.valueOf(o));
}
resultDoc.addSearchField(field, valuesAsString.toArray(new String[valuesAsString.size()]));
}
result.addSearchDocument(indexableObject, resultDoc);
result.addHighlightedResult(indexableObject,
new DiscoverResult.IndexableObjectHighlightResult(indexableObject, resultMap));
if (solrQueryResponse.getHighlighting() != null) {
Map<String, List<String>> highlightedFields = solrQueryResponse.getHighlighting().get(
indexableObject.getUniqueIndexID());
if (MapUtils.isNotEmpty(highlightedFields)) {
// We need to remove the "_hl" suffix from each key
Map<String, List<String>> resultMap = new HashMap<>();
for (String key : highlightedFields.keySet()) {
List<String> highlightOriginalValue = highlightedFields.get(key);
List<String[]> resultHighlightOriginalValue = new ArrayList<>();
for (String highlightValue : highlightOriginalValue) {
String[] splitted = highlightValue.split("###");
resultHighlightOriginalValue.add(splitted);
}
resultMap.put(key.substring(0, key.lastIndexOf("_hl")), highlightedFields.get(key));
}
result.addHighlightedResult(indexableObject,
new DiscoverResult.IndexableObjectHighlightResult(indexableObject, resultMap));
}
}
}
}
//Resolve our facet field values
resolveFacetFields(context, query, result, skipLoadingResponse, solrQueryResponse);
}
// If any stale entries are found in the current page of results,
// we remove those stale entries and rerun the same query.
// Otherwise, the query is valid and the results are returned.
if (zombieDocs.size() != 0) {
log.info("Cleaning " + zombieDocs.size() + " stale objects from Discovery Index");
log.info("ZombieDocs ");
zombieDocs.forEach(log::info);
solrSearchCore.getSolr().deleteById(zombieDocs);
solrSearchCore.getSolr().commit();
} else {
valid = true;
}
} while (!valid && executionCount <= maxAttempts);
//Resolve our facet field values
List<FacetField> facetFields = solrQueryResponse.getFacetFields();
if (!valid && executionCount == maxAttempts) {
String message = "The Discovery (Solr) index has a large number of stale entries,"
+ " and we could not complete this request. Please reindex all content"
+ " to remove these stale entries (e.g. dspace index-discovery -f).";
log.fatal(message);
throw new RuntimeException(message);
}
return result;
}
private void resolveFacetFields(Context context, DiscoverQuery query, DiscoverResult result,
boolean skipLoadingResponse, QueryResponse solrQueryResponse) throws SQLException {
List<FacetField> facetFields = solrQueryResponse.getFacetFields();
if (!skipLoadingResponse) {
if (facetFields != null) {
for (int i = 0; i < facetFields.size(); i++) {
FacetField facetField = facetFields.get(i);
@@ -940,7 +992,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
String field = transformFacetField(facetFieldConfig, facetField.getName(), true);
String authorityValue = transformAuthorityValue(context, facetField.getName(),
facetValue.getName());
String sortValue = transformSortValue(context, facetField.getName(), facetValue.getName());
String sortValue = transformSortValue(context,
facetField.getName(), facetValue.getName());
String filterValue = displayedValue;
if (StringUtils.isNotBlank(authorityValue)) {
filterValue = authorityValue;
@@ -956,7 +1009,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
if (solrQueryResponse.getFacetQuery() != null) {
if (solrQueryResponse.getFacetQuery() != null && !skipLoadingResponse) {
// just retrieve the facets in the order they were requested!
// also for the date we ask it in proper (reverse) order
// At the moment facet queries are only used for dates
@@ -964,7 +1017,8 @@ public class SolrServiceImpl implements SearchService, IndexingService {
solrQueryResponse.getFacetQuery());
for (String facetQuery : sortedFacetQueries.keySet()) {
//TODO: do not assume this, people may want to use it for other ends, use a regex to make sure
//We have a facet query, the values looks something like: dateissued.year:[1990 TO 2000] AND -2000
//We have a facet query, the values looks something like:
//dateissued.year:[1990 TO 2000] AND -2000
//Prepare the string from {facet.field.name}:[startyear TO endyear] to startyear - endyear
String facetField = facetQuery.substring(0, facetQuery.indexOf(":"));
String name = "";
@@ -975,7 +1029,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
filter = facetQuery.substring(facetQuery.indexOf('['));
filter = filter.substring(0, filter.lastIndexOf(']') + 1);
}
Integer count = sortedFacetQueries.get(facetQuery);
//No need to show empty years
@@ -987,16 +1040,13 @@ public class SolrServiceImpl implements SearchService, IndexingService {
}
}
}
if (solrQueryResponse.getSpellCheckResponse() != null) {
if (solrQueryResponse.getSpellCheckResponse() != null && !skipLoadingResponse) {
String recommendedQuery = solrQueryResponse.getSpellCheckResponse().getCollatedResult();
if (StringUtils.isNotBlank(recommendedQuery)) {
result.setSpellCheckQuery(recommendedQuery);
}
}
}
return result;
}
/**

View File

@@ -31,9 +31,11 @@ import org.dspace.content.authority.Choices;
import org.dspace.content.authority.factory.ContentAuthorityServiceFactory;
import org.dspace.content.authority.service.MetadataAuthorityService;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.WorkspaceItemService;
import org.dspace.discovery.indexobject.IndexableClaimedTask;
import org.dspace.discovery.indexobject.IndexableCollection;
import org.dspace.discovery.indexobject.IndexableItem;
import org.dspace.discovery.indexobject.IndexablePoolTask;
import org.dspace.discovery.indexobject.IndexableWorkflowItem;
@@ -71,6 +73,8 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
ClaimedTaskService claimedTaskService = XmlWorkflowServiceFactory.getInstance().getClaimedTaskService();
CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
ItemService itemService = ContentServiceFactory.getInstance().getItemService();
IndexingService indexer = DSpaceServicesFactory.getInstance().getServiceManager()
@@ -316,14 +320,344 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase {
}
@Test
public void verifySolrRecordsOfDeletedObjectsTest() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 2").build();
CollectionBuilder.createCollection(context, child2)
.withName("Collection 3").build();
ItemBuilder.createItem(context, col1)
.withTitle("Public item 1")
.withIssueDate("2017-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 2")
.withIssueDate("2016-02-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 3")
.withIssueDate("2016-02-13")
.withAuthor("Doe, Jane")
.withSubject("AnotherTest")
.withSubject("ExtraEntry")
.build();
context.setDispatcher("noindex");
assertSearchQuery(IndexableCollection.TYPE, 3);
assertSearchQuery(IndexableItem.TYPE, 3);
collectionService.delete(context, col1);
context.restoreAuthSystemState();
assertSearchQuery(IndexableCollection.TYPE, 2);
assertSearchQuery(IndexableItem.TYPE, 2);
}
@Test
public void verifySolrRecordsOfDeletedObjectsPaginationTest() throws Exception {
context.turnOffAuthorisationSystem();
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 2").build();
Collection col3 = CollectionBuilder.createCollection(context, child2)
.withName("Collection 3").build();
ItemBuilder.createItem(context, col1)
.withTitle("Public item 1")
.withIssueDate("2010-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 2")
.withIssueDate("2011-08-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 3")
.withIssueDate("2012-02-19")
.withAuthor("Doe, Jane")
.withSubject("AnotherTest")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 4")
.withIssueDate("2013-05-16")
.withAuthor("Vova, Jane")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 5")
.withIssueDate("2015-04-13")
.withAuthor("Marco, Bruni")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 6")
.withIssueDate("2016-01-21")
.withAuthor("Andriy, Beket")
.withSubject("ExtraEntry")
.build();
context.setDispatcher("noindex");
// check Collection type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
assertSearchQuery(IndexableCollection.TYPE, 3, 3, 0, -1);
// check Item type with start=0 and limit=default, we expect: indexableObjects=6, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 6, 6, 0, -1);
// delete col3 and all items that it contained
collectionService.delete(context, col3);
context.restoreAuthSystemState();
// check Collection type with start=0 and limit=default, we expect: indexableObjects=2, totalFound=2
assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1);
// check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2);
// check Item type with start=2 and limit=4, we expect: indexableObjects=1, totalFound=3
assertSearchQuery(IndexableItem.TYPE, 1, 3, 2, 4);
// check Item type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
// totalFound is now 3 because the stale objects were deleted
assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1);
}
@Test
public void disabledSolrToRemoveStaleObjectsTest() throws Exception {
context.turnOffAuthorisationSystem();
// disable removal of Solr documents that refer to items no longer in the database (stale entries)
configurationService.setProperty("discovery.removestale.attempts", -1);
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 2").build();
Collection col3 = CollectionBuilder.createCollection(context, child2)
.withName("Collection 3").build();
ItemBuilder.createItem(context, col1)
.withTitle("Public item 1")
.withIssueDate("2010-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 2")
.withIssueDate("2011-08-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 3")
.withIssueDate("2012-02-19")
.withAuthor("Doe, Jane")
.withSubject("AnotherTest")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 4")
.withIssueDate("2013-05-16")
.withAuthor("Vova, Jane")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 5")
.withIssueDate("2015-04-13")
.withAuthor("Marco, Bruni")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 6")
.withIssueDate("2016-01-21")
.withAuthor("Andriy, Beket")
.withSubject("ExtraEntry")
.build();
context.setDispatcher("noindex");
// check Collection type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
assertSearchQuery(IndexableCollection.TYPE, 3, 3, 0, -1);
// check Item type with start=0 and limit=default, we expect: indexableObjects=6, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 6, 6, 0, -1);
// delete col3 and all items that it contained
collectionService.delete(context, col3);
context.restoreAuthSystemState();
// check Collection type with start=0 and limit=default,
// we expect: indexableObjects=2, totalFound=3 (should be 2, but 1 stale object is still counted)
assertSearchQuery(IndexableCollection.TYPE, 2, 3, 0, -1);
// check Item type with start=0 and limit=2, we expect: indexableObjects=2, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 2, 6, 0, 2);
// check Item type with start=2 and limit=4,
// we expect: indexableObjects=1, totalFound=6 (should be 3, but 3 stale objects are still counted)
assertSearchQuery(IndexableItem.TYPE, 1, 6, 2, 4);
// check Item type with start=0 and limit=default,
// we expect: indexableObjects=3, totalFound=6 (should be 3, but 3 stale objects are still counted)
assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1);
}
@Test
public void disabledRerunOfSolrQueryDueToStaleObjectsTest() throws Exception {
context.turnOffAuthorisationSystem();
// disable re-run of the solr query when stale documents are found
configurationService.setProperty("discovery.removestale.attempts", 0);
parentCommunity = CommunityBuilder.createCommunity(context)
.withName("Parent Community")
.build();
Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Community child2 = CommunityBuilder.createSubCommunity(context, parentCommunity)
.withName("Sub Community 2")
.build();
Collection col1 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 1").build();
Collection col2 = CollectionBuilder.createCollection(context, child1)
.withName("Collection 2").build();
Collection col3 = CollectionBuilder.createCollection(context, child2)
.withName("Collection 3").build();
ItemBuilder.createItem(context, col1)
.withTitle("Public item 1")
.withIssueDate("2010-10-17")
.withAuthor("Smith, Donald")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 2")
.withIssueDate("2011-08-13")
.withAuthor("Smith, Maria")
.withSubject("TestingForMore")
.build();
ItemBuilder.createItem(context, col2)
.withTitle("Public item 3")
.withIssueDate("2012-02-19")
.withAuthor("Doe, Jane")
.withSubject("AnotherTest")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 4")
.withIssueDate("2013-05-16")
.withAuthor("Vova, Jane")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 5")
.withIssueDate("2015-04-13")
.withAuthor("Marco, Bruni")
.withSubject("ExtraEntry")
.build();
ItemBuilder.createItem(context, col3)
.withTitle("Public item 6")
.withIssueDate("2016-01-21")
.withAuthor("Andriy, Beket")
.withSubject("ExtraEntry")
.build();
context.setDispatcher("noindex");
// check Collection type with start=0 and limit=default, we expect: indexableObjects=3, totalFound=3
assertSearchQuery(IndexableCollection.TYPE, 3, 3, 0, -1);
// check Item type with start=0 and limit=default, we expect: indexableObjects=6, totalFound=6
assertSearchQuery(IndexableItem.TYPE, 6, 6, 0, -1);
// delete col3 and all items that it contained
collectionService.delete(context, col3);
context.restoreAuthSystemState();
// check Collection type with start=0 and limit=default,
// we expect: indexableObjects=2, totalFound=3 (should be 2, but 1 stale object is still counted)
assertSearchQuery(IndexableCollection.TYPE, 2, 3, 0, -1);
// as the previous query hit the stale object, running a new query should lead to a clean situation
assertSearchQuery(IndexableCollection.TYPE, 2, 2, 0, -1);
// similar test over the items
// check Item type with start=0 and limit=default,
// we expect: indexableObjects=3, totalFound=6 (3 stale objects here)
assertSearchQuery(IndexableItem.TYPE, 3, 6, 0, -1);
// as the previous query hit the stale objects, running a new query should lead to a clean situation
assertSearchQuery(IndexableItem.TYPE, 3, 3, 0, -1);
}
private void assertSearchQuery(String resourceType, int size) throws SearchServiceException {
assertSearchQuery(resourceType, size, size, 0, -1);
}
private void assertSearchQuery(String resourceType, int size, int totalFound, int start, int limit)
throws SearchServiceException {
DiscoverQuery discoverQuery = new DiscoverQuery();
discoverQuery.setQuery("*:*");
discoverQuery.setStart(start);
discoverQuery.setMaxResults(limit);
discoverQuery.addFilterQueries("search.resourcetype:" + resourceType);
DiscoverResult discoverResult = searchService.search(context, discoverQuery);
List<IndexableObject> indexableObjects = discoverResult.getIndexableObjects();
assertEquals(size, indexableObjects.size());
assertEquals(size, discoverResult.getTotalSearchResults());
assertEquals(totalFound, discoverResult.getTotalSearchResults());
}
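The tests above all provoke stale index entries in the same way: the event dispatcher is switched to one that does not update the index, so a subsequent delete changes the database but leaves the matching Solr documents behind. The pattern, in outline:

    context.setDispatcher("noindex");        // stop propagating changes to the Solr index
    collectionService.delete(context, col3); // col3 (or col1) and its items are removed from the database only
    context.restoreAuthSystemState();
    // assertSearchQuery(...) then observes how search copes with, or cleans up, the stale documents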

View File

@@ -1489,7 +1489,6 @@ log.report.dir = ${dspace.dir}/log
# table rendering when used with the column widths above, but not generally recommended.
# webui.itemlist.tablewidth = 100%
##### SFX Server (OpenURL) #####
# SFX query is appended to this URL. If this property is commented out or

View File

@@ -44,3 +44,8 @@ discovery.facet.namedtype.workspace = 001workspace\n|||\nWorkspace###workspace
discovery.facet.namedtype.workflow.item = 002workflow\n|||\nWorkflow###workflow
discovery.facet.namedtype.workflow.claimed = 003workflow\n|||\nValidation###validation
discovery.facet.namedtype.workflow.pooled = 004workflow\n|||\nWaiting for Controller###waitingforcontroller
# Set the number of times a query is retried when stale objects are found.
# Set to -1 to ignore stale objects. Set to 0 to avoid the extra re-run of the query while still cleaning up
# the index whenever stale objects are found. Default: 3
discovery.removestale.attempts = 3
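For reference, SolrServiceImpl reads this property with a default of 3, as shown in the diff above:

    int maxAttempts = configurationService.getIntProperty("discovery.removestale.attempts", 3);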