Merge branch 'DSpace:main' into main

Max Nuding, 2023-03-31 08:13:16 +02:00 (committed by GitHub)
6 changed files with 92 additions and 47 deletions

View File

@@ -315,25 +315,25 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// check if destination bitstream exists
Bundle existingBundle = null;
-Bitstream existingBitstream = null;
+List<Bitstream> existingBitstreams = new ArrayList<Bitstream>();
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
if (bundles.size() > 0) {
-// only finds the last match (FIXME?)
+// only finds the last matching bundle and all matching bitstreams in the proper bundle(s)
for (Bundle bundle : bundles) {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
existingBundle = bundle;
-existingBitstream = bitstream;
+existingBitstreams.add(bitstream);
}
}
}
}
// if exists and overwrite = false, exit
-if (!overWrite && (existingBitstream != null)) {
+if (!overWrite && (existingBitstreams.size() > 0)) {
if (!isQuiet) {
logInfo("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
@@ -408,9 +408,8 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
logError("!!! OutOfMemoryError !!!");
}
// fixme - set date?
// we are overwriting, so remove old bitstream
-if (existingBitstream != null) {
+for (Bitstream existingBitstream : existingBitstreams) {
bundleService.removeBitstream(context, existingBundle, existingBitstream);
}
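Note on the two MediaFilterServiceImpl hunks above: the service now collects every bitstream whose name matches the derivative's name instead of remembering only the last match, and on overwrite it removes each collected bitstream. A minimal, self-contained sketch of that collect-then-remove-all pattern (hypothetical Bitstream record and plain lists, not the DSpace bundle/bitstream services):

import java.util.ArrayList;
import java.util.List;

// Hypothetical stand-ins; not the DSpace Bitstream/Bundle services.
class OverwriteAllMatchesSketch {
    record Bitstream(String name) { }

    // Collect every bitstream whose (trimmed) name matches, instead of keeping only the last hit.
    static List<Bitstream> findMatches(List<Bitstream> bitstreams, String newName) {
        List<Bitstream> matches = new ArrayList<>();
        for (Bitstream bitstream : bitstreams) {
            if (bitstream.name().trim().equals(newName.trim())) {
                matches.add(bitstream);
            }
        }
        return matches;
    }

    // When overwriting, remove every old copy, not just the last one that was found.
    static void overwrite(List<Bitstream> bundle, Bitstream replacement) {
        for (Bitstream old : findMatches(bundle, replacement.name())) {
            bundle.remove(old);
        }
        bundle.add(replacement);
    }
}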

View File

@@ -56,37 +56,18 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
* new DSpace.getServiceManager().getServiceByName("org.dspace.discovery.SolrIndexer");
*/
-if (indexClientOptions == IndexClientOptions.REMOVE) {
-handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
-indexer.unIndexContent(context, commandLine.getOptionValue("r"));
-} else if (indexClientOptions == IndexClientOptions.CLEAN) {
-handler.logInfo("Cleaning Index");
-indexer.cleanIndex();
-} else if (indexClientOptions == IndexClientOptions.DELETE) {
-handler.logInfo("Deleting Index");
-indexer.deleteIndex();
-} else if (indexClientOptions == IndexClientOptions.BUILD ||
-indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
-handler.logInfo("(Re)building index from scratch.");
-indexer.deleteIndex();
-indexer.createIndex(context);
-if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
-checkRebuildSpellCheck(commandLine, indexer);
-}
-} else if (indexClientOptions == IndexClientOptions.OPTIMIZE) {
-handler.logInfo("Optimizing search core.");
-indexer.optimize();
-} else if (indexClientOptions == IndexClientOptions.SPELLCHECK) {
-checkRebuildSpellCheck(commandLine, indexer);
-} else if (indexClientOptions == IndexClientOptions.INDEX) {
-final String param = commandLine.getOptionValue('i');
+Optional<IndexableObject> indexableObject = Optional.empty();
+if (indexClientOptions == IndexClientOptions.REMOVE || indexClientOptions == IndexClientOptions.INDEX) {
+final String param = indexClientOptions == IndexClientOptions.REMOVE ? commandLine.getOptionValue('r') :
+commandLine.getOptionValue('i');
UUID uuid = null;
try {
uuid = UUID.fromString(param);
} catch (Exception e) {
-// nothing to do, it should be an handle
+// nothing to do, it should be a handle
}
-Optional<IndexableObject> indexableObject = Optional.empty();
if (uuid != null) {
final Item item = ContentServiceFactory.getInstance().getItemService().find(context, uuid);
if (item != null) {
@@ -118,7 +99,32 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
if (!indexableObject.isPresent()) {
throw new IllegalArgumentException("Cannot resolve " + param + " to a DSpace object");
}
-handler.logInfo("Indexing " + param + " force " + commandLine.hasOption("f"));
+}
+if (indexClientOptions == IndexClientOptions.REMOVE) {
+handler.logInfo("Removing " + commandLine.getOptionValue("r") + " from Index");
+indexer.unIndexContent(context, indexableObject.get().getUniqueIndexID());
+} else if (indexClientOptions == IndexClientOptions.CLEAN) {
+handler.logInfo("Cleaning Index");
+indexer.cleanIndex();
+} else if (indexClientOptions == IndexClientOptions.DELETE) {
+handler.logInfo("Deleting Index");
+indexer.deleteIndex();
+} else if (indexClientOptions == IndexClientOptions.BUILD ||
+indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
+handler.logInfo("(Re)building index from scratch.");
+indexer.deleteIndex();
+indexer.createIndex(context);
+if (indexClientOptions == IndexClientOptions.BUILDANDSPELLCHECK) {
+checkRebuildSpellCheck(commandLine, indexer);
+}
+} else if (indexClientOptions == IndexClientOptions.OPTIMIZE) {
+handler.logInfo("Optimizing search core.");
+indexer.optimize();
+} else if (indexClientOptions == IndexClientOptions.SPELLCHECK) {
+checkRebuildSpellCheck(commandLine, indexer);
+} else if (indexClientOptions == IndexClientOptions.INDEX) {
+handler.logInfo("Indexing " + commandLine.getOptionValue('i') + " force " + commandLine.hasOption("f"));
final long startTimeMillis = System.currentTimeMillis();
final long count = indexAll(indexer, ContentServiceFactory.getInstance().
getItemService(), context, indexableObject.get());
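The IndexClient hunks above restructure the command handling: the -r/-i parameter is resolved to an IndexableObject once, up front, for both REMOVE and INDEX, and only afterwards does the option dispatch run, so REMOVE can unindex by the resolved object's unique index ID instead of the raw parameter. A simplified sketch of that resolve-then-dispatch flow, using made-up Option/Indexer stand-ins rather than the real DSpace classes:

import java.util.Optional;
import java.util.UUID;

// Hypothetical stand-ins for the DSpace option enum, indexer and object lookup.
class ResolveThenDispatchSketch {
    enum Option { REMOVE, INDEX, CLEAN }

    interface Indexer {
        void unIndex(String uniqueIndexId);
        void index(String uniqueIndexId, boolean force);
        void clean();
    }

    // Resolve the parameter (UUID or handle) to a unique index id; the real client looks the
    // object up in the database, so the result may legitimately be empty there.
    static Optional<String> resolve(String param) {
        try {
            return Optional.of("uuid:" + UUID.fromString(param));
        } catch (IllegalArgumentException e) {
            // not a UUID, treat it as a handle
            return Optional.of("handle:" + param);
        }
    }

    static void run(Option option, String param, boolean force, Indexer indexer) {
        Optional<String> uniqueId = Optional.empty();
        // REMOVE and INDEX share one resolution step before any command is dispatched.
        if (option == Option.REMOVE || option == Option.INDEX) {
            uniqueId = resolve(param);
            if (uniqueId.isEmpty()) {
                throw new IllegalArgumentException("Cannot resolve " + param + " to an object");
            }
        }
        if (option == Option.REMOVE) {
            indexer.unIndex(uniqueId.get());
        } else if (option == Option.INDEX) {
            indexer.index(uniqueId.get(), force);
        } else if (option == Option.CLEAN) {
            indexer.clean();
        }
    }
}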
@@ -179,7 +185,7 @@ public class IndexClient extends DSpaceRunnable<IndexDiscoveryScriptConfiguratio
indexingService.indexContent(context, dso, true, true);
count++;
if (dso.getIndexedObject() instanceof Community) {
-final Community community = (Community) dso;
+final Community community = (Community) dso.getIndexedObject();
final String communityHandle = community.getHandle();
for (final Community subcommunity : community.getSubcommunities()) {
count += indexAll(indexingService, itemService, context, new IndexableCommunity(subcommunity));
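The one-line hunk above fixes a cast: dso is the indexable wrapper, so the Community must be taken from dso.getIndexedObject() rather than cast from the wrapper itself. A tiny illustration with hypothetical wrapper/payload types:

// Hypothetical wrapper/payload pair illustrating why the cast target matters.
class WrapperCastSketch {
    static class Community { }

    // Stand-in for an IndexableObject: a wrapper holding the actual domain object.
    static class IndexableCommunity {
        private final Community indexedObject;
        IndexableCommunity(Community community) { this.indexedObject = community; }
        Object getIndexedObject() { return indexedObject; }
    }

    public static void main(String[] args) {
        IndexableCommunity dso = new IndexableCommunity(new Community());
        // Cast the payload, not the wrapper: the wrapper itself is not a Community.
        Community community = (Community) dso.getIndexedObject();
        System.out.println("payload cast succeeded: " + (community != null));
    }
}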

View File

@@ -107,6 +107,10 @@ public class IndexingUtils {
ArrayList<String> prefixedIds = new ArrayList<>();
for (int auth : authorizations) {
for (ResourcePolicy policy : authService.getPoliciesActionFilter(context, obj, auth)) {
+// Avoid NPE in cases where the policy does not have group or eperson
+if (policy.getGroup() == null && policy.getEPerson() == null) {
+continue;
+}
String prefixedId = policy.getGroup() == null
? "e" + policy.getEPerson().getID()
: "g" + policy.getGroup().getID();
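The added guard in IndexingUtils skips resource policies that reference neither a group nor an eperson, so the prefixed-id expression ("e" + eperson id or "g" + group id) can no longer dereference null. The same guard in standalone form, with a hypothetical Policy record in place of ResourcePolicy:

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

// Hypothetical Policy record; the real code reads group/eperson from ResourcePolicy.
class PolicyPrefixSketch {
    record Policy(UUID group, UUID eperson) { }

    static List<String> prefixedIds(List<Policy> policies) {
        List<String> ids = new ArrayList<>();
        for (Policy policy : policies) {
            // Skip policies with neither a group nor an eperson; otherwise the
            // expression below would dereference null.
            if (policy.group() == null && policy.eperson() == null) {
                continue;
            }
            ids.add(policy.group() == null ? "e" + policy.eperson() : "g" + policy.group());
        }
        return ids;
    }
}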

View File

@@ -256,7 +256,12 @@ public class SolrServiceImpl implements SearchService, IndexingService {
try {
if (solrSearchCore.getSolr() != null) {
-indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID).delete(searchUniqueID);
+IndexFactory index = indexObjectServiceFactory.getIndexableObjectFactory(searchUniqueID);
+if (index != null) {
+index.delete(searchUniqueID);
+} else {
+log.warn("Object not found in Solr index: " + searchUniqueID);
+}
if (commit) {
solrSearchCore.getSolr().commit();
}
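The SolrServiceImpl hunk replaces the chained factory call with a guarded lookup: if no IndexFactory is found for the unique ID, the delete is skipped and a warning is logged instead of a NullPointerException being thrown. A small sketch of that lookup-then-guard pattern against a hypothetical factory map:

import java.util.Map;
import java.util.logging.Logger;

// Hypothetical factory registry; the real code asks indexObjectServiceFactory for the
// factory that matches the unique id.
class GuardedDeleteSketch {
    interface IndexFactory {
        void delete(String uniqueId);
    }

    private static final Logger log = Logger.getLogger("GuardedDeleteSketch");

    static void unIndex(Map<String, IndexFactory> factories, String searchUniqueID) {
        // Guard the lookup: delete only when a factory is registered, otherwise warn and move on.
        IndexFactory index = factories.get(searchUniqueID);
        if (index != null) {
            index.delete(searchUniqueID);
        } else {
            log.warning("Object not found in Solr index: " + searchUniqueID);
        }
    }
}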

View File

@@ -202,17 +202,18 @@ public class ConverterService {
* @throws ClassCastException if the converter's return type is not compatible with the inferred return type.
*/
public <M, R> Page<R> toRestPage(List<M> modelObjects, Pageable pageable, Projection projection) {
+if (pageable == null) {
+pageable = utils.getPageable(pageable);
+}
+List<M> pageableObjects = utils.getPageObjectList(modelObjects, pageable);
List<R> transformedList = new LinkedList<>();
-for (M modelObject : modelObjects) {
+for (M modelObject : pageableObjects) {
R transformedObject = toRest(modelObject, projection);
if (transformedObject != null) {
transformedList.add(transformedObject);
}
}
-if (pageable == null) {
-pageable = utils.getPageable(pageable);
-}
-return utils.getPage(transformedList, pageable);
+return new PageImpl(transformedList, pageable, modelObjects.size());
}
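After this change, toRestPage slices the model list down to the requested page before converting, and builds the result with modelObjects.size() as the total, so only the current page is transformed while the reported total still covers the whole list. A condensed sketch of that order of operations, assuming Spring Data's Pageable/PageImpl on the classpath and ignoring the null-pageable and null-conversion handling the real method keeps:

import java.util.List;
import java.util.function.Function;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;

class ToRestPageSketch {
    // Slice first, convert only the slice, then report the full list size as the total.
    static <M, R> Page<R> toRestPage(List<M> modelObjects, Pageable pageable, Function<M, R> toRest) {
        int from = Math.toIntExact(pageable.getOffset());
        int to = Math.min(from + pageable.getPageSize(), modelObjects.size());
        List<R> transformed = modelObjects.subList(from, to).stream().map(toRest).toList();
        return new PageImpl<>(transformed, pageable, modelObjects.size());
    }

    public static void main(String[] args) {
        Page<String> page = toRestPage(List.of(1, 2, 3, 4, 5), PageRequest.of(1, 2), String::valueOf);
        // Prints "[3, 4] of 5": only the requested page was converted, the total is unchanged.
        System.out.println(page.getContent() + " of " + page.getTotalElements());
    }
}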
/**

View File

@@ -151,20 +151,50 @@ public class Utils {
public <T> Page<T> getPage(List<T> fullContents, @Nullable Pageable optionalPageable) {
Pageable pageable = getPageable(optionalPageable);
int total = fullContents.size();
-List<T> pageContent = null;
if (pageable.getOffset() > total) {
throw new PaginationException(total);
} else {
-if (pageable.getOffset() + pageable.getPageSize() > total) {
-pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()), total);
-} else {
-pageContent = fullContents.subList(Math.toIntExact(pageable.getOffset()),
-Math.toIntExact(pageable.getOffset()) + pageable.getPageSize());
-}
+List<T> pageContent = getListSlice(fullContents, pageable);
return new PageImpl<>(pageContent, pageable, total);
}
}
+/**
+* Returns list of objects for the current page.
+* @param fullList the complete list of objects
+* @param optionalPageable
+* @return list of page objects
+* @param <T>
+*/
+public <T> List<T> getPageObjectList(List<T> fullList, @Nullable Pageable optionalPageable) {
+Pageable pageable = getPageable(optionalPageable);
+int total = fullList.size();
+if (pageable.getOffset() > total) {
+throw new PaginationException(total);
+} else {
+return getListSlice(fullList, pageable);
+}
+}
+/**
+* Returns the list elements required for the page
+* @param fullList the complete list of objects
+* @param pageable
+* @return list of page objects
+* @param <T>
+*/
+private <T> List<T> getListSlice(List<T> fullList, Pageable pageable) {
+int total = fullList.size();
+List<T> pageContent = null;
+if (pageable.getOffset() + pageable.getPageSize() > total) {
+pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()), total);
+} else {
+pageContent = fullList.subList(Math.toIntExact(pageable.getOffset()),
+Math.toIntExact(pageable.getOffset()) + pageable.getPageSize());
+}
+return pageContent;
+}
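The new getListSlice clamps the end of the slice to the list size, so a final partial page works: with five elements, offset 4 and page size 20 yield subList(4, 5). A standalone sketch of the same bounds arithmetic (plain Java, independent of the Pageable type):

import java.util.List;

class ListSliceSketch {
    // Same bounds rule as getListSlice: clamp the end of the slice to the list size.
    static <T> List<T> slice(List<T> full, long offset, int pageSize) {
        int from = Math.toIntExact(offset);
        int to = (int) Math.min(offset + pageSize, full.size());
        return full.subList(from, to);
    }

    public static void main(String[] args) {
        List<Integer> full = List.of(10, 20, 30, 40, 50);
        System.out.println(slice(full, 2, 2));  // [30, 40] -> a full page
        System.out.println(slice(full, 4, 20)); // [50]     -> clamped partial last page
    }
}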
/**
* Convenience method to get a default pageable instance if needed.
*