[DS-732] Further browse refactoring for efficiency

git-svn-id: http://scm.dspace.org/svn/repo/dspace/trunk@5716 9c30dcfa-912a-0410-8fc2-9e0234be79fd
This commit is contained in:
Graham Triggs
2010-11-02 21:29:10 +00:00
parent 8d255f71fb
commit 80c12b1dae
8 changed files with 257 additions and 256 deletions

View File

@@ -61,7 +61,12 @@
</plugins> </plugins>
</build> </build>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-jdbc</artifactId>
<version>2.5.6</version>
</dependency>
<dependency>
<groupId>org.dspace</groupId> <groupId>org.dspace</groupId>
<artifactId>handle</artifactId> <artifactId>handle</artifactId>
<version>6.2</version> <version>6.2</version>

View File

@@ -38,7 +38,9 @@
package org.dspace.browse; package org.dspace.browse;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -75,7 +77,7 @@ public class BrowseConsumer implements Consumer
private static Logger log = Logger.getLogger(BrowseConsumer.class); private static Logger log = Logger.getLogger(BrowseConsumer.class);
// items to be updated in browse index // items to be updated in browse index
private Set<Item> toUpdate = null; private Map<Integer, ItemHolder> toUpdate = null;
public void initialize() public void initialize()
throws Exception throws Exception
@@ -88,7 +90,7 @@ public class BrowseConsumer implements Consumer
{ {
if(toUpdate == null) if(toUpdate == null)
{ {
toUpdate = new HashSet<Item>(); toUpdate = new HashMap<Integer, ItemHolder>();
} }
log.debug("consume() evaluating event: " + event.toString()); log.debug("consume() evaluating event: " + event.toString());
@@ -102,26 +104,31 @@ public class BrowseConsumer implements Consumer
// If an Item is created or its metadata is modified.. // If an Item is created or its metadata is modified..
case Constants.ITEM: case Constants.ITEM:
if(et == Event.MODIFY_METADATA || et == Event.CREATE) if (et == Event.MODIFY_METADATA || et == Event.CREATE)
{ {
DSpaceObject subj = event.getSubject(ctx); Item subj = (Item)event.getSubject(ctx);
if (subj != null) if (subj != null)
{ {
log.debug("consume() adding event to update queue: " + event.toString()); log.debug("consume() adding event to update queue: " + event.toString());
toUpdate.add((Item)subj); if (et == Event.CREATE || !toUpdate.containsKey(subj.getID()))
{
toUpdate.put(subj.getID(), new ItemHolder(subj, et == Event.CREATE));
}
} }
} }
break; break;
// track ADD and REMOVE from collections, that changes browse index. // track ADD and REMOVE from collections, that changes browse index.
case Constants.COLLECTION: case Constants.COLLECTION:
if (event.getObjectType() == Constants.ITEM if (event.getObjectType() == Constants.ITEM && (et == Event.ADD || et == Event.REMOVE))
&& (et == Event.ADD || et == Event.REMOVE))
{ {
Item obj = (Item)event.getObject(ctx); Item obj = (Item)event.getObject(ctx);
if (obj != null) if (obj != null)
{ {
log.debug("consume() adding event to update queue: " + event.toString()); log.debug("consume() adding event to update queue: " + event.toString());
toUpdate.add(obj); if (!toUpdate.containsKey(obj.getID()))
{
toUpdate.put(obj.getID(), new ItemHolder(obj, false));
}
} }
} }
break; break;
@@ -139,7 +146,7 @@ public class BrowseConsumer implements Consumer
{ {
// Update/Add items // Update/Add items
for (Item i : toUpdate) for (ItemHolder i : toUpdate.values())
{ {
// FIXME: there is an exception handling problem here // FIXME: there is an exception handling problem here
try try
@@ -147,7 +154,7 @@ public class BrowseConsumer implements Consumer
// Update browse indices // Update browse indices
ctx.turnOffAuthorisationSystem(); ctx.turnOffAuthorisationSystem();
IndexBrowse ib = new IndexBrowse(ctx); IndexBrowse ib = new IndexBrowse(ctx);
ib.indexItem(i); ib.indexItem(i.item, i.createEvent);
ctx.restoreAuthSystemState(); ctx.restoreAuthSystemState();
} }
catch (BrowseException e) catch (BrowseException e)
@@ -159,8 +166,8 @@ public class BrowseConsumer implements Consumer
if (log.isDebugEnabled()) if (log.isDebugEnabled())
{ {
log.debug("Updated browse indices for Item id=" log.debug("Updated browse indices for Item id="
+ String.valueOf(i.getID()) + ", hdl=" + String.valueOf(i.item.getID()) + ", hdl="
+ i.getHandle()); + i.item.getHandle());
} }
} }
@@ -178,4 +185,15 @@ public class BrowseConsumer implements Consumer
public void finish(Context ctx) { public void finish(Context ctx) {
} }
private final class ItemHolder {
private Item item;
private boolean createEvent;
ItemHolder(Item pItem, boolean pCreateEvent)
{
item = pItem;
createEvent = pCreateEvent;
}
}
} }

View File

@@ -39,6 +39,7 @@ package org.dspace.browse;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
/** /**
* Interface for any class wishing to provide a browse storage layer. This particular * Interface for any class wishing to provide a browse storage layer. This particular
@@ -187,7 +188,7 @@ public interface BrowseCreateDAO
* @return the ids of any distinct records that have been unmapped * @return the ids of any distinct records that have been unmapped
* @throws BrowseException * @throws BrowseException
*/ */
public MappingResults updateDistinctMappings(String table, int itemID, int[] distinctIDs) throws BrowseException; public MappingResults updateDistinctMappings(String table, int itemID, Set<Integer> distinctIDs) throws BrowseException;
/** /**
* Find out of a given table exists. * Find out of a given table exists.

View File

@@ -41,9 +41,11 @@ import java.sql.PreparedStatement;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -249,24 +251,18 @@ public class BrowseCreateDAOOracle implements BrowseCreateDAO
/* (non-Javadoc) /* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#updateDistinctMapping(java.lang.String, int, int) * @see org.dspace.browse.BrowseCreateDAO#updateDistinctMapping(java.lang.String, int, int)
*/ */
public MappingResults updateDistinctMappings(String table, int itemID, int[] distinctIDs) throws BrowseException public MappingResults updateDistinctMappings(String table, int itemID, Set<Integer> distinctIDs) throws BrowseException
{ {
BrowseMappingResults results = new BrowseMappingResults(); BrowseMappingResults results = new BrowseMappingResults();
try try
{ {
// Remove (set to -1) any duplicate distinctIDs Set<Integer> addDistinctIDs = null;
for (int i = 0; i < distinctIDs.length; i++)
{
if (!isFirstOccurrence(distinctIDs, i))
{
distinctIDs[i] = -1;
}
}
// Find all existing mappings for this item // Find all existing mappings for this item
TableRowIterator tri = DatabaseManager.queryTable(context, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID); TableRowIterator tri = DatabaseManager.queryTable(context, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID);
if (tri != null) if (tri != null)
{ {
addDistinctIDs = (Set<Integer>)((HashSet<Integer>)distinctIDs).clone();
try try
{ {
while (tri.hasNext()) while (tri.hasNext())
@@ -276,16 +272,13 @@ public class BrowseCreateDAOOracle implements BrowseCreateDAO
// Check the item mappings to see if it contains this mapping // Check the item mappings to see if it contains this mapping
boolean itemIsMapped = false; boolean itemIsMapped = false;
int trDistinctID = tr.getIntColumn("distinct_id"); int trDistinctID = tr.getIntColumn("distinct_id");
for (int i = 0; i < distinctIDs.length; i++) if (distinctIDs.contains(trDistinctID))
{ {
// Found this mapping // Found this mapping
if (distinctIDs[i] == trDistinctID) results.addRetainedDistinctId(trDistinctID);
{ // Flag it, and remove (-1) from the item mappings
results.addRetainedDistinctId(trDistinctID); itemIsMapped = true;
// Flag it, and remove (-1) from the item mappings addDistinctIDs.remove(trDistinctID);
itemIsMapped = true;
distinctIDs[i] = -1;
}
} }
// The item is no longer mapped to this community, so remove the database record // The item is no longer mapped to this community, so remove the database record
@@ -301,9 +294,13 @@ public class BrowseCreateDAOOracle implements BrowseCreateDAO
tri.close(); tri.close();
} }
} }
else
{
addDistinctIDs = distinctIDs;
}
// Any remaining mappings need to be added to the database // Any remaining mappings need to be added to the database
for (int distinctID : distinctIDs) for (int distinctID : addDistinctIDs)
{ {
if (distinctID > -1) if (distinctID > -1)
{ {

View File

@@ -41,9 +41,11 @@ import java.sql.PreparedStatement;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.dspace.core.Context; import org.dspace.core.Context;
@@ -246,24 +248,18 @@ public class BrowseCreateDAOPostgres implements BrowseCreateDAO
/* (non-Javadoc) /* (non-Javadoc)
* @see org.dspace.browse.BrowseCreateDAO#updateDistinctMapping(java.lang.String, int, int) * @see org.dspace.browse.BrowseCreateDAO#updateDistinctMapping(java.lang.String, int, int)
*/ */
public MappingResults updateDistinctMappings(String table, int itemID, int[] distinctIDs) throws BrowseException public MappingResults updateDistinctMappings(String table, int itemID, Set<Integer> distinctIDs) throws BrowseException
{ {
BrowseMappingResults results = new BrowseMappingResults(); BrowseMappingResults results = new BrowseMappingResults();
try try
{ {
// Remove (set to -1) any duplicate distinctIDs Set<Integer> addDistinctIDs = null;
for (int i = 0; i < distinctIDs.length; i++)
{
if (!isFirstOccurrence(distinctIDs, i))
{
distinctIDs[i] = -1;
}
}
// Find all existing mappings for this item // Find all existing mappings for this item
TableRowIterator tri = DatabaseManager.queryTable(context, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID); TableRowIterator tri = DatabaseManager.queryTable(context, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID);
if (tri != null) if (tri != null)
{ {
addDistinctIDs = (Set<Integer>)((HashSet<Integer>)distinctIDs).clone();
try try
{ {
while (tri.hasNext()) while (tri.hasNext())
@@ -273,16 +269,13 @@ public class BrowseCreateDAOPostgres implements BrowseCreateDAO
// Check the item mappings to see if it contains this mapping // Check the item mappings to see if it contains this mapping
boolean itemIsMapped = false; boolean itemIsMapped = false;
int trDistinctID = tr.getIntColumn("distinct_id"); int trDistinctID = tr.getIntColumn("distinct_id");
for (int i = 0; i < distinctIDs.length; i++) if (distinctIDs.contains(trDistinctID))
{ {
// Found this mapping // Found this mapping
if (distinctIDs[i] == trDistinctID) results.addRetainedDistinctId(trDistinctID);
{ // Flag it, and remove (-1) from the item mappings
results.addRetainedDistinctId(trDistinctID); itemIsMapped = true;
// Flag it, and remove (-1) from the item mappings addDistinctIDs.remove(trDistinctID);
itemIsMapped = true;
distinctIDs[i] = -1;
}
} }
// The item is no longer mapped to this community, so remove the database record // The item is no longer mapped to this community, so remove the database record
@@ -298,9 +291,13 @@ public class BrowseCreateDAOPostgres implements BrowseCreateDAO
tri.close(); tri.close();
} }
} }
else
{
addDistinctIDs = distinctIDs;
}
// Any remaining mappings need to be added to the database // Any remaining mappings need to be added to the database
for (int distinctID : distinctIDs) for (int distinctID : addDistinctIDs)
{ {
if (distinctID > -1) if (distinctID > -1)
{ {

View File

@@ -295,22 +295,6 @@ public class IndexBrowse
return this.outFile; return this.outFile;
} }
private void removeIndex(int itemID, String table)
throws BrowseException
{
dao.deleteByItemID(table, itemID);
}
private void removeDistinctIndex(int itemID, String distinctTable, String mapTable)
throws BrowseException
{
List<Integer> distinctIds = dao.deleteMappingsByItemID(mapTable, itemID);
if (distinctIds != null && distinctIds.size() > 0)
{
dao.pruneDistinct(distinctTable, mapTable, distinctIds);
}
}
/** /**
* Prune indexes - called from the public interfaces or at the end of a batch indexing process * Prune indexes - called from the public interfaces or at the end of a batch indexing process
*/ */
@@ -342,8 +326,12 @@ public class IndexBrowse
* @param item the item to index * @param item the item to index
* @throws BrowseException * @throws BrowseException
*/ */
public void indexItem(Item item) public void indexItem(Item item) throws BrowseException
throws BrowseException {
indexItem(item, false);
}
void indexItem(Item item, boolean addingNewItem) throws BrowseException
{ {
// If the item is not archived AND has not been withdrawn // If the item is not archived AND has not been withdrawn
// we can assume that it has *never* been archived - in that case, // we can assume that it has *never* been archived - in that case,
@@ -354,9 +342,13 @@ public class IndexBrowse
// isWithdrawn() as FALSE, may result in stale data in the browse tables. // isWithdrawn() as FALSE, may result in stale data in the browse tables.
// Such an update should never occur though, and if it does, probably indicates a major // Such an update should never occur though, and if it does, probably indicates a major
// problem with the code updating the Item. // problem with the code updating the Item.
if (item.isArchived() || item.isWithdrawn()) if (item.isArchived())
{ {
indexItem(new ItemMetadataProxy(item)); indexItem(new ItemMetadataProxy(item), addingNewItem);
}
else if (item.isWithdrawn())
{
indexItem(new ItemMetadataProxy(item), false);
} }
} }
@@ -366,7 +358,7 @@ public class IndexBrowse
* @param item the item to index * @param item the item to index
* @throws BrowseException * @throws BrowseException
*/ */
private void indexItem(ItemMetadataProxy item) private void indexItem(ItemMetadataProxy item, boolean addingNewItem)
throws BrowseException throws BrowseException
{ {
// Map to store the metadata from the Item // Map to store the metadata from the Item
@@ -384,7 +376,7 @@ public class IndexBrowse
{ {
// Record doesn't exist - ensure that it doesn't exist in the withdrawn index, // Record doesn't exist - ensure that it doesn't exist in the withdrawn index,
// and add it to the archived item index // and add it to the archived item index
removeIndex(item.getID(), BrowseIndex.getWithdrawnBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getWithdrawnBrowseIndex().getTableName(), item.getID());
dao.insertIndex(BrowseIndex.getItemBrowseIndex().getTableName(), item.getID(), sortMap); dao.insertIndex(BrowseIndex.getItemBrowseIndex().getTableName(), item.getID(), sortMap);
} }
@@ -397,15 +389,15 @@ public class IndexBrowse
{ {
// Record doesn't exist - ensure that it doesn't exist in the item index, // Record doesn't exist - ensure that it doesn't exist in the item index,
// and add it to the withdrawn item index // and add it to the withdrawn item index
removeIndex(item.getID(), BrowseIndex.getItemBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getItemBrowseIndex().getTableName(), item.getID());
dao.insertIndex(BrowseIndex.getWithdrawnBrowseIndex().getTableName(), item.getID(), sortMap); dao.insertIndex(BrowseIndex.getWithdrawnBrowseIndex().getTableName(), item.getID(), sortMap);
} }
} }
else else
{ {
// This item shouldn't exist in either index - ensure that it is removed // This item shouldn't exist in either index - ensure that it is removed
removeIndex(item.getID(), BrowseIndex.getItemBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getItemBrowseIndex().getTableName(), item.getID());
removeIndex(item.getID(), BrowseIndex.getWithdrawnBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getWithdrawnBrowseIndex().getTableName(), item.getID());
} }
// Update the community mappings if they are required, or remove them if they aren't // Update the community mappings if they are required, or remove them if they aren't
@@ -441,60 +433,65 @@ public class IndexBrowse
int minConfidence = MetadataAuthorityManager.getManager() int minConfidence = MetadataAuthorityManager.getManager()
.getMinConfidence(values[0].schema, values[0].element, values[0].qualifier); .getMinConfidence(values[0].schema, values[0].element, values[0].qualifier);
for (int x = 0; x < values.length; x++) for (DCValue value : values)
{ {
// Ensure that there is a value to index before inserting it // Ensure that there is a value to index before inserting it
if (StringUtils.isEmpty(values[x].value)) if (StringUtils.isEmpty(value.value))
{ {
log.error("Null metadata value for item " + item.getID() + ", field: " + log.error("Null metadata value for item " + item.getID() + ", field: " +
values[x].schema + "." + value.schema + "." +
values[x].element + value.element +
(values[x].qualifier == null ? "" : "." + values[x].qualifier)); (value.qualifier == null ? "" : "." + value.qualifier));
} }
else else
{ {
if (bis[i].isAuthorityIndex() && if (bis[i].isAuthorityIndex() &&
(values[x].authority == null || values[x].confidence < minConfidence)) (value.authority == null || value.confidence < minConfidence))
{ {
// skip to next value in this authority field if value is not authoritative // skip to next value in this authority field if value is not authoritative
log.debug("Skipping non-authoritative value: "+item.getID()+", field="+values[x].schema+"."+values[x].element+"."+values[x].qualifier+", value="+values[x].value+", authority="+values[x].authority+", confidence="+values[x].confidence+" (BAD AUTHORITY)"); log.debug("Skipping non-authoritative value: " + item.getID() + ", field=" + value.schema + "." + value.element + "." + value.qualifier + ", value=" + value.value + ", authority=" + value.authority + ", confidence=" + value.confidence + " (BAD AUTHORITY)");
continue; continue;
} }
// is there any valid (with appropriate confidence) authority key? // is there any valid (with appropriate confidence) authority key?
if (values[x].authority != null if (value.authority != null
&& values[x].confidence >= minConfidence) && value.confidence >= minConfidence)
{ {
boolean isValueVariants = false; boolean isValueInVariants = false;
// Are there variants of this value
List<String> variants = ChoiceAuthorityManager.getManager() List<String> variants = ChoiceAuthorityManager.getManager()
.getVariants(values[x].schema, values[x].element, values[x].qualifier, .getVariants(value.schema, value.element, value.qualifier,
values[x].authority, values[x].language); value.authority, value.language);
// If we have variants, index them
if (variants != null) if (variants != null)
{ {
for (String var : variants) for (String var : variants)
{ {
String nVal = OrderFormat.makeSortString(var, values[x].language, bis[i].getDataType()); String nVal = OrderFormat.makeSortString(var, value.language, bis[i].getDataType());
distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), var, values[x].authority, nVal)); distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), var, value.authority, nVal));
if (var.equals(values[x].value)) if (var.equals(value.value))
{ {
isValueVariants = true; isValueInVariants = true;
} }
} }
} }
if (!isValueVariants) // If we didn't index the value as one of the variants, add it now
if (!isValueInVariants)
{ {
// get the normalised version of the value // get the normalised version of the value
String nVal = OrderFormat.makeSortString(values[x].value, values[x].language, bis[i].getDataType()); String nVal = OrderFormat.makeSortString(value.value, value.language, bis[i].getDataType());
distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), values[x].value, values[x].authority, nVal)); distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), value.value, value.authority, nVal));
} }
} }
else // put it in the browse index as if it doesn't have an authority key else // put it in the browse index as if it doesn't have an authority key
{ {
// get the normalised version of the value // get the normalised version of the value
String nVal = OrderFormat.makeSortString(values[x].value, values[x].language, bis[i].getDataType()); String nVal = OrderFormat.makeSortString(value.value, value.language, bis[i].getDataType());
distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), values[x].value, null, nVal)); distIDSet.add(dao.getDistinctID(bis[i].getDistinctTableName(), value.value, null, nVal));
} }
} }
} }
@@ -505,19 +502,20 @@ public class IndexBrowse
// Do we have any mappings? // Do we have any mappings?
if (distIDSet.isEmpty()) if (distIDSet.isEmpty())
{ {
// remove any old mappings if (!addingNewItem)
removeDistinctIndex(item.getID(), bis[i].getDistinctTableName(), bis[i].getMapTableName()); {
// remove any old mappings
List<Integer> distinctIds = dao.deleteMappingsByItemID(bis[i].getMapTableName(), item.getID());
if (distinctIds != null && distinctIds.size() > 0)
{
dao.pruneDistinct(bis[i].getDistinctTableName(), bis[i].getMapTableName(), distinctIds);
}
}
} }
else else
{ {
// Update the existing mappings // Update the existing mappings
int[] distIDarr = new int[distIDSet.size()]; MappingResults results = dao.updateDistinctMappings(bis[i].getMapTableName(), item.getID(), distIDSet);
int didx = 0;
for (Integer distID : distIDSet)
{
distIDarr[didx++] = distID;
}
MappingResults results = dao.updateDistinctMappings(bis[i].getMapTableName(), item.getID(), distIDarr);
if (results.getRemovedDistinctIds() != null && results.getRemovedDistinctIds().size() > 0) if (results.getRemovedDistinctIds() != null && results.getRemovedDistinctIds().size() > 0)
{ {
pruneDistinctIndex(bis[i], results.getRemovedDistinctIds()); pruneDistinctIndex(bis[i], results.getRemovedDistinctIds());
@@ -603,32 +601,6 @@ public class IndexBrowse
} }
} }
/**
* @deprecated
* @param item
* @return
* @throws BrowseException
*/
public boolean itemAdded(Item item)
throws BrowseException
{
indexItem(item);
return true;
}
/**
* @deprecated
* @param item
* @return
* @throws BrowseException
*/
public boolean itemChanged(Item item)
throws BrowseException
{
indexItem(item);
return true;
}
/** /**
* remove all the indices for the given item * remove all the indices for the given item
* *
@@ -651,13 +623,13 @@ public class IndexBrowse
if (bis[i].isMetadataIndex()) if (bis[i].isMetadataIndex())
{ {
log.debug("Removing indexing for removed item " + itemID + ", for index: " + bis[i].getTableName()); log.debug("Removing indexing for removed item " + itemID + ", for index: " + bis[i].getTableName());
removeIndex(itemID, bis[i].getMapTableName()); dao.deleteByItemID(bis[i].getMapTableName(), itemID);
} }
} }
// Remove from the item indexes (archive and withdrawn) // Remove from the item indexes (archive and withdrawn)
removeIndex(itemID, BrowseIndex.getItemBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getItemBrowseIndex().getTableName(), itemID);
removeIndex(itemID, BrowseIndex.getWithdrawnBrowseIndex().getTableName()); dao.deleteByItemID(BrowseIndex.getWithdrawnBrowseIndex().getTableName(), itemID);
dao.deleteCommunityMappings(itemID); dao.deleteCommunityMappings(itemID);
return true; return true;
@@ -672,103 +644,115 @@ public class IndexBrowse
public static void main(String[] argv) public static void main(String[] argv)
throws SQLException, BrowseException, ParseException throws SQLException, BrowseException, ParseException
{ {
Context context = new Context(); Date startTime = new Date();
context.turnOffAuthorisationSystem(); try
IndexBrowse indexer = new IndexBrowse(context); {
Context context = new Context();
// create an options object and populate it context.turnOffAuthorisationSystem();
CommandLineParser parser = new PosixParser(); IndexBrowse indexer = new IndexBrowse(context);
Options options = new Options();
// create an options object and populate it
// these are mutually exclusive, and represent the primary actions CommandLineParser parser = new PosixParser();
options.addOption("t", "tables", false, "create the tables only, do not attempt to index. Mutually exclusive with -f and -i"); Options options = new Options();
options.addOption("i", "index", false, "actually do the indexing. Mutually exclusive with -t and -f");
options.addOption("f", "full", false, "make the tables, and do the indexing. This forces -x. Mutually exclusive with -t and -i"); // these are mutually exclusive, and represent the primary actions
options.addOption("t", "tables", false, "create the tables only, do not attempt to index. Mutually exclusive with -f and -i");
// these options can be specified only with the -f option options.addOption("i", "index", false, "actually do the indexing. Mutually exclusive with -t and -f");
options.addOption("r", "rebuild", false, "should we rebuild all the indices, which removes old index tables and creates new ones. For use with -f. Mutually exclusive with -d"); options.addOption("f", "full", false, "make the tables, and do the indexing. This forces -x. Mutually exclusive with -t and -i");
options.addOption("d", "delete", false, "delete all the indices, but don't create new ones. For use with -f. This is mutually exclusive with -r");
// these options can be specified only with the -f option
// these options can be specified only with the -t and -f options options.addOption("r", "rebuild", false, "should we rebuild all the indices, which removes old index tables and creates new ones. For use with -f. Mutually exclusive with -d");
options.addOption("o", "out", true, "[-o <filename>] write the remove and create SQL to the given file. For use with -t and -f"); // FIXME: not currently working options.addOption("d", "delete", false, "delete all the indices, but don't create new ones. For use with -f. This is mutually exclusive with -r");
options.addOption("p", "print", false, "write the remove and create SQL to the stdout. For use with -t and -f");
options.addOption("x", "execute", false, "execute all the remove and create SQL against the database. For use with -t and -f"); // these options can be specified only with the -t and -f options
options.addOption("s", "start", true, "[-s <int>] start from this index number and work upward (mostly only useful for debugging). For use with -t and -f"); options.addOption("o", "out", true, "[-o <filename>] write the remove and create SQL to the given file. For use with -t and -f"); // FIXME: not currently working
options.addOption("p", "print", false, "write the remove and create SQL to the stdout. For use with -t and -f");
// this option can be used with any argument options.addOption("x", "execute", false, "execute all the remove and create SQL against the database. For use with -t and -f");
options.addOption("v", "verbose", false, "print extra information to the stdout. If used in conjunction with -p, you cannot use the stdout to generate your database structure"); options.addOption("s", "start", true, "[-s <int>] start from this index number and work upward (mostly only useful for debugging). For use with -t and -f");
// display the help. If this is specified, it trumps all other arguments options.addOption("v", "verbose", false, "print extra information to the stdout. If used in conjunction with -p, you cannot use the stdout to generate your database structure");
options.addOption("h", "help", false, "show this help documentation. Overrides all other arguments"); options.addOption("v", "verbose", false, "print extra information to the stdout. If used in conjunction with -p, you cannot use the stdout to generate your database structure");
CommandLine line = parser.parse(options, argv); // display the help. If this is specified, it trumps all other arguments
options.addOption("h", "help", false, "show this help documentation. Overrides all other arguments");
// display the help
if (line.hasOption("h")) CommandLine line = parser.parse(options, argv);
{
indexer.usage(options); // display the help
return; if (line.hasOption("h"))
} {
indexer.usage(options);
if (line.hasOption("v")) return;
{ }
indexer.setVerbose(true);
} if (line.hasOption("v"))
{
if (line.hasOption("i")) indexer.setVerbose(true);
{ }
indexer.createIndex();
return; if (line.hasOption("i"))
} {
indexer.createIndex();
if (line.hasOption("f")) return;
{ }
if (line.hasOption('r'))
{ if (line.hasOption("f"))
indexer.setRebuild(true); {
} if (line.hasOption('r'))
else if (line.hasOption("d")) {
{ indexer.setRebuild(true);
indexer.setDelete(true); }
} else if (line.hasOption("d"))
} {
indexer.setDelete(true);
if (line.hasOption("f") || line.hasOption("t")) }
{ }
if (line.hasOption("s"))
{ if (line.hasOption("f") || line.hasOption("t"))
indexer.setStart(Integer.parseInt(line.getOptionValue("s"))); {
} if (line.hasOption("s"))
if (line.hasOption("x")) {
{ indexer.setStart(Integer.parseInt(line.getOptionValue("s")));
indexer.setExecute(true); }
} if (line.hasOption("x"))
if (line.hasOption("p")) {
{ indexer.setExecute(true);
indexer.setStdOut(true); }
} if (line.hasOption("p"))
if (line.hasOption("o")) {
{ indexer.setStdOut(true);
indexer.setFileOut(true); }
indexer.setOutFile(line.getOptionValue("o")); if (line.hasOption("o"))
} {
} indexer.setFileOut(true);
indexer.setOutFile(line.getOptionValue("o"));
if (line.hasOption("t")) }
{ }
indexer.prepTables();
return; if (line.hasOption("t"))
} {
indexer.prepTables();
if (line.hasOption("f")) return;
{ }
indexer.setExecute(true);
indexer.initBrowse(); if (line.hasOption("f"))
return; {
} indexer.setExecute(true);
indexer.initBrowse();
indexer.usage(options); return;
context.complete(); }
indexer.usage(options);
context.complete();
}
finally
{
Date endTime = new Date();
System.out.println("Started: " + startTime.getTime());
System.out.println("Ended: " + endTime.getTime());
System.out.println("Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)");
}
} }
/** /**
@@ -1162,7 +1146,8 @@ public class IndexBrowse
for (int j = 0; j < items.length; j++) for (int j = 0; j < items.length; j++)
{ {
indexItem(new ItemMetadataProxy(items[j].getID(), items[j])); // Creating the indexes from scracth, so treat each item as if it's new
indexItem(new ItemMetadataProxy(items[j].getID(), items[j]), true);
// after each item we commit the context and clear the cache // after each item we commit the context and clear the cache
context.commit(); context.commit();

View File

@@ -38,9 +38,12 @@ import java.sql.PreparedStatement;
import java.sql.SQLException; import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set;
import mockit.Mock; import mockit.Mock;
import mockit.MockClass; import mockit.MockClass;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
@@ -289,25 +292,19 @@ public class MockBrowseCreateDAOOracle
} }
@Mock @Mock
public MappingResults updateDistinctMappings(String table, int itemID, int[] distinctIDs) throws BrowseException public MappingResults updateDistinctMappings(String table, int itemID, Set<Integer> distinctIDs) throws BrowseException
{ {
BrowseMappingResults results = new BrowseMappingResults(); BrowseMappingResults results = new BrowseMappingResults();
try try
{ {
checkContext(); checkContext();
// Remove (set to -1) any duplicate distinctIDs Set<Integer> addDistinctIDs = null;
for (int i = 0; i < distinctIDs.length; i++)
{
if (!isFirstOccurrence(distinctIDs, i))
{
distinctIDs[i] = -1;
}
}
// Find all existing mappings for this item // Find all existing mappings for this item
TableRowIterator tri = DatabaseManager.queryTable(internalContext, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID); TableRowIterator tri = DatabaseManager.queryTable(internalContext, table, "SELECT * FROM " + table + " WHERE item_id=?", itemID);
if (tri != null) if (tri != null)
{ {
addDistinctIDs = (Set<Integer>)((HashSet<Integer>)distinctIDs).clone();
try try
{ {
while (tri.hasNext()) while (tri.hasNext())
@@ -317,16 +314,13 @@ public class MockBrowseCreateDAOOracle
// Check the item mappings to see if it contains this mapping // Check the item mappings to see if it contains this mapping
boolean itemIsMapped = false; boolean itemIsMapped = false;
int trDistinctID = tr.getIntColumn("distinct_id"); int trDistinctID = tr.getIntColumn("distinct_id");
for (int i = 0; i < distinctIDs.length; i++) if (distinctIDs.contains(trDistinctID))
{ {
// Found this mapping // Found this mapping
if (distinctIDs[i] == trDistinctID) results.addRetainedDistinctId(trDistinctID);
{ // Flag it, and remove (-1) from the item mappings
results.addRetainedDistinctId(trDistinctID); itemIsMapped = true;
// Flag it, and remove (-1) from the item mappings addDistinctIDs.remove(trDistinctID);
itemIsMapped = true;
distinctIDs[i] = -1;
}
} }
// The item is no longer mapped to this community, so remove the database record // The item is no longer mapped to this community, so remove the database record
@@ -342,9 +336,13 @@ public class MockBrowseCreateDAOOracle
tri.close(); tri.close();
} }
} }
else
{
addDistinctIDs = distinctIDs;
}
// Any remaining mappings need to be added to the database // Any remaining mappings need to be added to the database
for (int distinctID : distinctIDs) for (int distinctID : addDistinctIDs)
{ {
if (distinctID > -1) if (distinctID > -1)
{ {

View File

@@ -227,7 +227,7 @@ public class ItemMapServlet extends DSpaceServlet
try try
{ {
IndexBrowse ib = new IndexBrowse(context); IndexBrowse ib = new IndexBrowse(context);
ib.itemChanged(myItem); ib.indexItem(myItem);
} }
catch (BrowseException e) catch (BrowseException e)
{ {
@@ -277,7 +277,7 @@ public class ItemMapServlet extends DSpaceServlet
try try
{ {
IndexBrowse ib = new IndexBrowse(context); IndexBrowse ib = new IndexBrowse(context);
ib.itemChanged(myItem); ib.indexItem(myItem);
} }
catch (BrowseException e) catch (BrowseException e)
{ {