Fix various problems with resources potentially not being freed, and other minor fixes suggested by FindBugs

git-svn-id: http://scm.dspace.org/svn/repo/branches/dspace-1_5_x@3036 9c30dcfa-912a-0410-8fc2-9e0234be79fd
Graham Triggs
2008-08-06 15:52:30 +00:00
parent 44cdd859a8
commit 9ae95a94e7
42 changed files with 1972 additions and 1190 deletions
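The recurring fix in the diffs below is the same FindBugs-driven pattern: open the resource, use it inside a try block, and release it in a finally block guarded by a null check, so a failed open cannot trigger a NullPointerException and an exception mid-read cannot leak the handle. A minimal, self-contained sketch of that pattern (the class name, main method, and the dc2mods.cfg file name are illustrative only, not part of the commit):

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class LoadConfigExample
{
    public static Properties load(String configFile) throws IOException
    {
        InputStream is = null;
        Properties props = new Properties();
        try
        {
            // open and read the file inside the try block
            is = new FileInputStream(configFile);
            props.load(is);
        }
        finally
        {
            // close in finally so the stream is freed even if load() throws;
            // the null check covers the case where the open itself failed
            if (is != null)
                try { is.close(); } catch (IOException ioe) { }
        }
        return props;
    }

    public static void main(String[] args) throws IOException
    {
        System.out.println(load("dc2mods.cfg").size() + " mappings loaded");
    }
}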

View File

@@ -242,9 +242,18 @@ public class METSExport
             + File.separator + "config" + File.separator + "dc2mods.cfg";
 
         // Read it in
-        InputStream is = new FileInputStream(configFile);
-
-        dcToMODS = new Properties();
-        dcToMODS.load(is);
+        InputStream is = null;
+        try
+        {
+            is = new FileInputStream(configFile);
+            dcToMODS = new Properties();
+            dcToMODS.load(is);
+        }
+        finally
+        {
+            if (is != null)
+                try { is.close(); } catch (IOException ioe) { }
+        }
     }
 
     /**

View File

@@ -562,30 +562,41 @@ public class ReportGenerator
     {
         FileReader fr = null;
         BufferedReader br = null;
 
-        // read in the map file, printing a warning if none is found
-        String record = null;
-        try
-        {
-            fr = new FileReader(map);
-            br = new BufferedReader(fr);
-        }
-        catch (IOException e)
-        {
-            System.err.println("Failed to read map file: log file actions will be displayed without translation");
-            return;
-        }
-
-        // loop through the map file and read in the values
-        while ((record = br.readLine()) != null)
-        {
-            Matcher matchReal = real.matcher(record);
-
-            // if the line is real then read it in
-            if (matchReal.matches())
-            {
-                actionMap.put(matchReal.group(1).trim(), matchReal.group(2).trim());
-            }
-        }
+        try
+        {
+            // read in the map file, printing a warning if none is found
+            String record = null;
+            try
+            {
+                fr = new FileReader(map);
+                br = new BufferedReader(fr);
+            }
+            catch (IOException e)
+            {
+                System.err.println("Failed to read map file: log file actions will be displayed without translation");
+                return;
+            }
+
+            // loop through the map file and read in the values
+            while ((record = br.readLine()) != null)
+            {
+                Matcher matchReal = real.matcher(record);
+
+                // if the line is real then read it in
+                if (matchReal.matches())
+                {
+                    actionMap.put(matchReal.group(1).trim(), matchReal.group(2).trim());
+                }
+            }
+        }
+        finally
+        {
+            if (br != null)
+                try { br.close(); } catch (IOException ioe) { }
+
+            if (fr != null)
+                try { fr.close(); } catch (IOException ioe) { }
+        }
     }
 }

View File

@@ -140,12 +140,13 @@ public class X509Authentication
         // First look for keystore full of trusted certs.
         if (keystorePath != null)
         {
+            FileInputStream fis = null;
             if (keystorePassword == null)
                 keystorePassword = "";
             try {
                 KeyStore ks = KeyStore.getInstance("JKS");
-                ks.load(new FileInputStream(keystorePath),
-                        keystorePassword.toCharArray());
+                fis = new FileInputStream(keystorePath);
+                ks.load(fis, keystorePassword.toCharArray());
                 caCertKeyStore = ks;
             }
             catch (IOException e)
@@ -158,14 +159,22 @@ public class X509Authentication
                 log.error("X509Authentication: Failed to extract CA keystore, file="+
                           keystorePath+", error="+e.toString());
             }
+            finally
+            {
+                if (fis != null)
+                    try { fis.close(); } catch (IOException ioe) { }
+            }
         }
 
         // Second, try getting public key out of CA cert, if that's configured.
         if (caCertPath != null)
         {
+            InputStream is = null;
+            FileInputStream fis = null;
             try
             {
-                InputStream is = new BufferedInputStream(new FileInputStream(caCertPath));
+                fis = new FileInputStream(caCertPath);
+                is = new BufferedInputStream(fis);
                 X509Certificate cert = (X509Certificate) CertificateFactory
                         .getInstance("X.509").generateCertificate(is);
                 if (cert != null)
@@ -181,6 +190,14 @@ public class X509Authentication
                 log.error("X509Authentication: Failed to extract CA cert, file="+
                           caCertPath+", error="+e.toString());
             }
+            finally
+            {
+                if (is != null)
+                    try { is.close(); } catch (IOException ioe) { }
+                if (fis != null)
+                    try { fis.close(); } catch (IOException ioe) { }
+            }
         }
     }

View File

@@ -424,24 +424,31 @@ public class AuthorizeManager
         List<ResourcePolicy> policies = new ArrayList();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // first check the cache (FIXME: is this right?)
-            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
-                    ResourcePolicy.class, row.getIntColumn("policy_id"));
-
-            if (cachepolicy != null)
-            {
-                policies.add(cachepolicy);
-            }
-            else
-            {
-                policies.add(new ResourcePolicy(c, row));
-            }
-        }
-
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // first check the cache (FIXME: is this right?)
+                ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
+                        ResourcePolicy.class, row.getIntColumn("policy_id"));
+
+                if (cachepolicy != null)
+                {
+                    policies.add(cachepolicy);
+                }
+                else
+                {
+                    policies.add(new ResourcePolicy(c, row));
+                }
+            }
+        }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
 
         return policies;
     }
@@ -463,24 +470,31 @@ public class AuthorizeManager
         List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // first check the cache (FIXME: is this right?)
-            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
-                    ResourcePolicy.class, row.getIntColumn("policy_id"));
-
-            if (cachepolicy != null)
-            {
-                policies.add(cachepolicy);
-            }
-            else
-            {
-                policies.add(new ResourcePolicy(c, row));
-            }
-        }
-
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // first check the cache (FIXME: is this right?)
+                ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
+                        ResourcePolicy.class, row.getIntColumn("policy_id"));
+
+                if (cachepolicy != null)
+                {
+                    policies.add(cachepolicy);
+                }
+                else
+                {
+                    policies.add(new ResourcePolicy(c, row));
+                }
+            }
+        }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
 
         return policies;
     }
@@ -507,24 +521,31 @@ public class AuthorizeManager
         List<ResourcePolicy> policies = new ArrayList<ResourcePolicy>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // first check the cache (FIXME: is this right?)
-            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
-                    ResourcePolicy.class, row.getIntColumn("policy_id"));
-
-            if (cachepolicy != null)
-            {
-                policies.add(cachepolicy);
-            }
-            else
-            {
-                policies.add(new ResourcePolicy(c, row));
-            }
-        }
-
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // first check the cache (FIXME: is this right?)
+                ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
+                        ResourcePolicy.class, row.getIntColumn("policy_id"));
+
+                if (cachepolicy != null)
+                {
+                    policies.add(cachepolicy);
+                }
+                else
+                {
+                    policies.add(new ResourcePolicy(c, row));
+                }
+            }
+        }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
 
         return policies;
     }
@@ -697,37 +718,44 @@ public class AuthorizeManager
         TableRowIterator tri = DatabaseManager.queryTable(c, "resourcepolicy",
                 "SELECT * FROM resourcepolicy WHERE resource_type_id= ? "+
                 "AND resource_id= ? AND action_id= ? ",o.getType(),o.getID(),actionID);
 
         List<Group> groups = new ArrayList<Group>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // first check the cache (FIXME: is this right?)
-            ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
-                    ResourcePolicy.class, row.getIntColumn("policy_id"));
-
-            ResourcePolicy myPolicy = null;
-
-            if (cachepolicy != null)
-            {
-                myPolicy = cachepolicy;
-            }
-            else
-            {
-                myPolicy = new ResourcePolicy(c, row);
-            }
-
-            // now do we have a group?
-            Group myGroup = myPolicy.getGroup();
-
-            if (myGroup != null)
-            {
-                groups.add(myGroup);
-            }
-        }
-
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // first check the cache (FIXME: is this right?)
+                ResourcePolicy cachepolicy = (ResourcePolicy) c.fromCache(
+                        ResourcePolicy.class, row.getIntColumn("policy_id"));
+
+                ResourcePolicy myPolicy = null;
+
+                if (cachepolicy != null)
+                {
+                    myPolicy = cachepolicy;
+                }
+                else
+                {
+                    myPolicy = new ResourcePolicy(c, row);
+                }
+
+                // now do we have a group?
+                Group myGroup = myPolicy.getGroup();
+
+                if (myGroup != null)
+                {
+                    groups.add(myGroup);
+                }
+            }
+        }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
 
         Group[] groupArray = new Group[groups.size()];
         groupArray = groups.toArray(groupArray);

View File

@@ -96,11 +96,12 @@ public class ItemCountDAOOracle implements ItemCountDAO
     public void collectionCount(Collection collection, int count)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             // first find out if we have a record
             Object[] sparams = { new Integer(collection.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, collectionSelect, sparams);
+            tri = DatabaseManager.query(context, collectionSelect, sparams);
 
             if (tri.hasNext())
             {
@@ -112,15 +113,18 @@ public class ItemCountDAOOracle implements ItemCountDAO
                 Object[] params = { new Integer(collection.getID()), new Integer(count) };
                 DatabaseManager.updateQuery(context, collectionInsert, params);
             }
-
-            tri.close();
         }
         catch (SQLException e)
         {
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * Store the count of the given community
@@ -132,11 +136,12 @@ public class ItemCountDAOOracle implements ItemCountDAO
     public void communityCount(Community community, int count)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             // first find out if we have a record
             Object[] sparams = { new Integer(community.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, communitySelect, sparams);
+            tri = DatabaseManager.query(context, communitySelect, sparams);
 
             if (tri.hasNext())
             {
@@ -148,15 +153,18 @@ public class ItemCountDAOOracle implements ItemCountDAO
                 Object[] params = { new Integer(community.getID()), new Integer(count) };
                 DatabaseManager.updateQuery(context, communityInsert, params);
             }
-
-            tri.close();
         }
         catch (SQLException e)
        {
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * Set the dspace context to use
@@ -268,10 +276,11 @@ public class ItemCountDAOOracle implements ItemCountDAO
     private int getCollectionCount(Collection collection)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             Object[] params = { new Integer(collection.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, collectionSelect, params);
+            tri = DatabaseManager.query(context, collectionSelect, params);
 
             if (!tri.hasNext())
             {
@@ -284,9 +293,7 @@ public class ItemCountDAOOracle implements ItemCountDAO
             {
                 throw new ItemCountException("More than one count row in the database");
             }
 
-            tri.close();
-
             return tr.getIntColumn("count");
         }
         catch (SQLException e)
@@ -294,7 +301,12 @@ public class ItemCountDAOOracle implements ItemCountDAO
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * get the count for the given community
@@ -306,10 +318,11 @@ public class ItemCountDAOOracle implements ItemCountDAO
     private int getCommunityCount(Community community)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
        {
             Object[] params = { new Integer(community.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, communitySelect, params);
+            tri = DatabaseManager.query(context, communitySelect, params);
 
             if (!tri.hasNext())
             {
@@ -322,9 +335,7 @@ public class ItemCountDAOOracle implements ItemCountDAO
             {
                 throw new ItemCountException("More than one count row in the database");
             }
 
-            tri.close();
-
             return tr.getIntColumn("count");
         }
         catch (SQLException e)
@@ -332,5 +343,10 @@ public class ItemCountDAOOracle implements ItemCountDAO
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 }

View File

@@ -96,11 +96,12 @@ public class ItemCountDAOPostgres implements ItemCountDAO
     public void collectionCount(Collection collection, int count)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
        {
             // first find out if we have a record
             Object[] sparams = { new Integer(collection.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, collectionSelect, sparams);
+            tri = DatabaseManager.query(context, collectionSelect, sparams);
 
             if (tri.hasNext())
             {
@@ -112,15 +113,18 @@ public class ItemCountDAOPostgres implements ItemCountDAO
                 Object[] params = { new Integer(collection.getID()), new Integer(count) };
                 DatabaseManager.updateQuery(context, collectionInsert, params);
             }
-
-            tri.close();
         }
         catch (SQLException e)
         {
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * Store the count of the given community
@@ -132,11 +136,12 @@ public class ItemCountDAOPostgres implements ItemCountDAO
     public void communityCount(Community community, int count)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             // first find out if we have a record
             Object[] sparams = { new Integer(community.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, communitySelect, sparams);
+            tri = DatabaseManager.query(context, communitySelect, sparams);
 
             if (tri.hasNext())
             {
@@ -148,15 +153,18 @@ public class ItemCountDAOPostgres implements ItemCountDAO
                 Object[] params = { new Integer(community.getID()), new Integer(count) };
                 DatabaseManager.updateQuery(context, communityInsert, params);
             }
-
-            tri.close();
         }
         catch (SQLException e)
         {
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * Set the dspace context to use
@@ -268,10 +276,11 @@ public class ItemCountDAOPostgres implements ItemCountDAO
     private int getCollectionCount(Collection collection)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             Object[] params = { new Integer(collection.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, collectionSelect, params);
+            tri = DatabaseManager.query(context, collectionSelect, params);
 
             if (!tri.hasNext())
             {
@@ -284,9 +293,7 @@ public class ItemCountDAOPostgres implements ItemCountDAO
             {
                 throw new ItemCountException("More than one count row in the database");
             }
 
-            tri.close();
-
             return tr.getIntColumn("count");
         }
         catch (SQLException e)
@@ -294,7 +301,12 @@ public class ItemCountDAOPostgres implements ItemCountDAO
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 
     /**
      * get the count for the given community
@@ -306,10 +318,11 @@ public class ItemCountDAOPostgres implements ItemCountDAO
     private int getCommunityCount(Community community)
         throws ItemCountException
     {
-        try
+        TableRowIterator tri = null;
+        try
         {
             Object[] params = { new Integer(community.getID()) };
-            TableRowIterator tri = DatabaseManager.query(context, communitySelect, params);
+            tri = DatabaseManager.query(context, communitySelect, params);
 
             if (!tri.hasNext())
             {
@@ -322,9 +335,7 @@ public class ItemCountDAOPostgres implements ItemCountDAO
             {
                 throw new ItemCountException("More than one count row in the database");
             }
 
-            tri.close();
-
             return tr.getIntColumn("count");
         }
         catch (SQLException e)
@@ -332,5 +343,10 @@ public class ItemCountDAOPostgres implements ItemCountDAO
             log.error("caught exception: ", e);
             throw new ItemCountException(e);
         }
-    }
+        finally
+        {
+            if (tri != null)
+                tri.close();
+        }
+    }
 }

View File

@@ -98,59 +98,62 @@ public class HandleDispatcher implements BitstreamDispatcher
      * @throws SQLException
      *             if database access fails.
      */
-    private void init()
-    {
-        Context context = null;
-        int dsoType = -1;
-
-        int id = -1;
-        try
-        {
-            context = new Context();
-            DSpaceObject dso = HandleManager.resolveToObject(context, handle);
-            id = dso.getID();
-            dsoType = dso.getType();
-            context.abort();
-        }
-        catch (SQLException e)
-        {
-            LOG.error("init error " + e.getMessage(), e);
-            throw new RuntimeException("init error" + e.getMessage(), e);
-        }
-        finally
-        {
-            // Abort the context if it's still valid
-            if ((context != null) && context.isValid())
-            {
-                context.abort();
-            }
-        }
-
-        List ids = new ArrayList();
-
-        switch (dsoType)
-        {
-        case Constants.BITSTREAM:
-            ids.add(new Integer(id));
-            break;
-
-        case Constants.ITEM:
-            ids = bitstreamInfoDAO.getItemBitstreams(id);
-            break;
-
-        case Constants.COLLECTION:
-            ids = bitstreamInfoDAO.getCollectionBitstreams(id);
-            break;
-
-        case Constants.COMMUNITY:
-            ids = bitstreamInfoDAO.getCommunityBitstreams(id);
-            break;
-        }
-
-        delegate = new ListDispatcher(ids);
-        init = Boolean.TRUE;
-    }
+    private synchronized void init()
+    {
+        if (init == Boolean.FALSE)
+        {
+            Context context = null;
+            int dsoType = -1;
+
+            int id = -1;
+            try
+            {
+                context = new Context();
+                DSpaceObject dso = HandleManager.resolveToObject(context, handle);
+                id = dso.getID();
+                dsoType = dso.getType();
+                context.abort();
+            }
+            catch (SQLException e)
+            {
+                LOG.error("init error " + e.getMessage(), e);
+                throw new RuntimeException("init error" + e.getMessage(), e);
+            }
+            finally
+            {
+                // Abort the context if it's still valid
+                if ((context != null) && context.isValid())
+                {
+                    context.abort();
+                }
+            }
+
+            List ids = new ArrayList();
+
+            switch (dsoType)
+            {
+            case Constants.BITSTREAM:
+                ids.add(new Integer(id));
+                break;
+
+            case Constants.ITEM:
+                ids = bitstreamInfoDAO.getItemBitstreams(id);
+                break;
+
+            case Constants.COLLECTION:
+                ids = bitstreamInfoDAO.getCollectionBitstreams(id);
+                break;
+
+            case Constants.COMMUNITY:
+                ids = bitstreamInfoDAO.getCommunityBitstreams(id);
+                break;
+            }
+
+            delegate = new ListDispatcher(ids);
+            init = Boolean.TRUE;
+        }
+    }
 
     /**
@@ -160,12 +163,9 @@ public class HandleDispatcher implements BitstreamDispatcher
      */
     public int next()
     {
-        synchronized (init)
+        if (init == Boolean.FALSE)
         {
-            if (init == Boolean.FALSE)
-            {
-                init();
-            }
+            init();
         }
 
         return delegate.next();

View File

@@ -581,27 +581,32 @@ public class Bitstream extends DSpaceObject
         // Build a list of Bundle objects
         List<Bundle> bundles = new ArrayList<Bundle>();
 
-        while (tri.hasNext())
-        {
-            TableRow r = tri.next();
-
-            // First check the cache
-            Bundle fromCache = (Bundle) bContext.fromCache(Bundle.class, r
-                    .getIntColumn("bundle_id"));
-
-            if (fromCache != null)
-            {
-                bundles.add(fromCache);
-            }
-            else
-            {
-                bundles.add(new Bundle(bContext, r));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow r = tri.next();
+
+                // First check the cache
+                Bundle fromCache = (Bundle) bContext.fromCache(Bundle.class, r
+                        .getIntColumn("bundle_id"));
+
+                if (fromCache != null)
+                {
+                    bundles.add(fromCache);
+                }
+                else
+                {
+                    bundles.add(new Bundle(bContext, r));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Bundle[] bundleArray = new Bundle[bundles.size()];
         bundleArray = (Bundle[]) bundles.toArray(bundleArray);

View File

@@ -114,12 +114,19 @@ public class BitstreamFormat
                 "SELECT * FROM fileextension WHERE bitstream_format_id= ? ",
                 getID());
 
-        while (tri.hasNext())
-        {
-            extensions.add(tri.next().getStringColumn("extension"));
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                extensions.add(tri.next().getStringColumn("extension"));
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Cache ourselves
         context.cache(this, row.getIntColumn("bitstream_format_id"));
@@ -298,26 +305,33 @@ public class BitstreamFormat
         TableRowIterator tri = DatabaseManager.queryTable(context, "bitstreamformatregistry",
                 "SELECT * FROM bitstreamformatregistry ORDER BY bitstream_format_id");
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // From cache?
-            BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
-                    BitstreamFormat.class, row
-                            .getIntColumn("bitstream_format_id"));
-
-            if (fromCache != null)
-            {
-                formats.add(fromCache);
-            }
-            else
-            {
-                formats.add(new BitstreamFormat(context, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // From cache?
+                BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
+                        BitstreamFormat.class, row
+                                .getIntColumn("bitstream_format_id"));
+
+                if (fromCache != null)
+                {
+                    formats.add(fromCache);
+                }
+                else
+                {
+                    formats.add(new BitstreamFormat(context, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Return the formats as an array
         BitstreamFormat[] formatArray = new BitstreamFormat[formats.size()];
@@ -349,26 +363,33 @@ public class BitstreamFormat
         TableRowIterator tri = DatabaseManager.queryTable(context,
                 "bitstreamformatregistry", myQuery);
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // From cache?
-            BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
-                    BitstreamFormat.class, row
-                            .getIntColumn("bitstream_format_id"));
-
-            if (fromCache != null)
-            {
-                formats.add(fromCache);
-            }
-            else
-            {
-                formats.add(new BitstreamFormat(context, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // From cache?
+                BitstreamFormat fromCache = (BitstreamFormat) context.fromCache(
+                        BitstreamFormat.class, row
+                                .getIntColumn("bitstream_format_id"));
+
+                if (fromCache != null)
+                {
+                    formats.add(fromCache);
+                }
+                else
+                {
+                    formats.add(new BitstreamFormat(context, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Return the formats as an array
         BitstreamFormat[] formatArray = new BitstreamFormat[formats.size()];

View File

@@ -110,26 +110,33 @@ public class Bundle extends DSpaceObject
                         + "bundle2bitstream.bitstream_id=bitstream.bitstream_id AND "
                         + "bundle2bitstream.bundle_id= ? ",
                 bundleRow.getIntColumn("bundle_id"));
 
-        while (tri.hasNext())
-        {
-            TableRow r = (TableRow) tri.next();
-
-            // First check the cache
-            Bitstream fromCache = (Bitstream) context.fromCache(
-                    Bitstream.class, r.getIntColumn("bitstream_id"));
-
-            if (fromCache != null)
-            {
-                bitstreams.add(fromCache);
-            }
-            else
-            {
-                bitstreams.add(new Bitstream(ourContext, r));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow r = (TableRow) tri.next();
+
+                // First check the cache
+                Bitstream fromCache = (Bitstream) context.fromCache(
+                        Bitstream.class, r.getIntColumn("bitstream_id"));
+
+                if (fromCache != null)
+                {
+                    bitstreams.add(fromCache);
+                }
+                else
+                {
+                    bitstreams.add(new Bitstream(ourContext, r));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Cache ourselves
         context.cache(this, row.getIntColumn("bundle_id"));
@@ -331,26 +338,33 @@ public class Bundle extends DSpaceObject
                         "item2bundle.item_id=item.item_id AND " +
                         "item2bundle.bundle_id= ? ",
                 bundleRow.getIntColumn("bundle_id"));
 
-        while (tri.hasNext())
-        {
-            TableRow r = (TableRow) tri.next();
-
-            // Used cached copy if there is one
-            Item fromCache = (Item) ourContext.fromCache(Item.class, r
-                    .getIntColumn("item_id"));
-
-            if (fromCache != null)
-            {
-                items.add(fromCache);
-            }
-            else
-            {
-                items.add(new Item(ourContext, r));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow r = (TableRow) tri.next();
+
+                // Used cached copy if there is one
+                Item fromCache = (Item) ourContext.fromCache(Item.class, r
+                        .getIntColumn("item_id"));
+
+                if (fromCache != null)
+                {
+                    items.add(fromCache);
+                }
+                else
+                {
+                    items.add(new Item(ourContext, r));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Item[] itemArray = new Item[items.size()];
         itemArray = (Item[]) items.toArray(itemArray);
@@ -503,13 +517,20 @@ public class Bundle extends DSpaceObject
                 "SELECT * FROM bundle2bitstream WHERE bitstream_id= ? ",
                 b.getID());
 
-        if (!tri.hasNext())
-        {
-            // The bitstream is an orphan, delete it
-            b.delete();
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                // The bitstream is an orphan, delete it
+                b.delete();
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
/** /**

View File

@@ -289,25 +289,32 @@ public class Collection extends DSpaceObject
         List<Collection> collections = new ArrayList<Collection>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Collection fromCache = (Collection) context.fromCache(
-                    Collection.class, row.getIntColumn("collection_id"));
-
-            if (fromCache != null)
-            {
-                collections.add(fromCache);
-            }
-            else
-            {
-                collections.add(new Collection(context, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Collection fromCache = (Collection) context.fromCache(
+                        Collection.class, row.getIntColumn("collection_id"));
+
+                if (fromCache != null)
+                {
+                    collections.add(fromCache);
+                }
+                else
+                {
+                    collections.add(new Collection(context, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Collection[] collectionArray = new Collection[collections.size()];
         collectionArray = (Collection[]) collections.toArray(collectionArray);
@@ -924,24 +931,31 @@ public class Collection extends DSpaceObject
                 "SELECT * FROM collection2item WHERE item_id= ? ",
                 item.getID());
 
-        if (!tri.hasNext())
-        {
-            //make the right to remove the item explicit because the implicit
-            // relation
-            //has been removed. This only has to concern the currentUser
-            // because
-            //he started the removal process and he will end it too.
-            //also add right to remove from the item to remove it's bundles.
-            AuthorizeManager.addPolicy(ourContext, item, Constants.DELETE,
-                    ourContext.getCurrentUser());
-            AuthorizeManager.addPolicy(ourContext, item, Constants.REMOVE,
-                    ourContext.getCurrentUser());
-
-            // Orphan; delete it
-            item.delete();
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                //make the right to remove the item explicit because the implicit
+                // relation
+                //has been removed. This only has to concern the currentUser
+                // because
+                //he started the removal process and he will end it too.
+                //also add right to remove from the item to remove it's bundles.
+                AuthorizeManager.addPolicy(ourContext, item, Constants.DELETE,
+                        ourContext.getCurrentUser());
+                AuthorizeManager.addPolicy(ourContext, item, Constants.REMOVE,
+                        ourContext.getCurrentUser());
+
+                // Orphan; delete it
+                item.delete();
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
@@ -1181,31 +1195,38 @@ public class Collection extends DSpaceObject
         // Build a list of Community objects
         List<Community> communities = new ArrayList<Community>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community owner = (Community) ourContext.fromCache(Community.class,
-                    row.getIntColumn("community_id"));
-
-            if (owner == null)
-            {
-                owner = new Community(ourContext, row);
-            }
-
-            communities.add(owner);
-
-            // now add any parent communities
-            Community[] parents = owner.getAllParents();
-
-            for (int i = 0; i < parents.length; i++)
-            {
-                communities.add(parents[i]);
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community owner = (Community) ourContext.fromCache(Community.class,
+                        row.getIntColumn("community_id"));
+
+                if (owner == null)
+                {
+                    owner = new Community(ourContext, row);
+                }
+
+                communities.add(owner);
+
+                // now add any parent communities
+                Community[] parents = owner.getAllParents();
+
+                for (int i = 0; i < parents.length; i++)
+                {
+                    communities.add(parents[i]);
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Community[] communityArray = new Community[communities.size()];
         communityArray = (Community[]) communities.toArray(communityArray);
@@ -1316,20 +1337,39 @@ public class Collection extends DSpaceObject
     public int countItems()
         throws SQLException
     {
-        String query = "SELECT count(*) FROM collection2item, item WHERE "
-                     + "collection2item.collection_id = ? "
-                     + "AND collection2item.item_id = item.item_id "
-                     + "AND in_archive ='1' AND item.withdrawn='0' ";
-
-        PreparedStatement statement = ourContext.getDBConnection().prepareStatement(query);
-        statement.setInt(1,getID());
-
-        ResultSet rs = statement.executeQuery();
-
-        rs.next();
-        int itemcount = rs.getInt(1);
-
-        statement.close();
+        int itemcount = 0;
+        PreparedStatement statement = null;
+        ResultSet rs = null;
+
+        try
+        {
+            String query = "SELECT count(*) FROM collection2item, item WHERE "
+                         + "collection2item.collection_id = ? "
+                         + "AND collection2item.item_id = item.item_id "
+                         + "AND in_archive ='1' AND item.withdrawn='0' ";
+
+            statement = ourContext.getDBConnection().prepareStatement(query);
+            statement.setInt(1,getID());
+
+            rs = statement.executeQuery();
+
+            if (rs != null)
+            {
+                rs.next();
+                itemcount = rs.getInt(1);
+            }
+        }
+        finally
+        {
+            if (rs != null)
+            {
+                try { rs.close(); } catch (SQLException sqle) { }
+            }
+
+            if (statement != null)
+            {
+                try { statement.close(); } catch (SQLException sqle) { }
+            }
+        }
 
         return itemcount;
     }

View File

@@ -237,25 +237,32 @@ public class Community extends DSpaceObject
         List<Community> communities = new ArrayList<Community>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community fromCache = (Community) context.fromCache(
-                    Community.class, row.getIntColumn("community_id"));
-
-            if (fromCache != null)
-            {
-                communities.add(fromCache);
-            }
-            else
-            {
-                communities.add(new Community(context, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community fromCache = (Community) context.fromCache(
+                        Community.class, row.getIntColumn("community_id"));
+
+                if (fromCache != null)
+                {
+                    communities.add(fromCache);
+                }
+                else
+                {
+                    communities.add(new Community(context, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Community[] communityArray = new Community[communities.size()];
         communityArray = (Community[]) communities.toArray(communityArray);
@@ -283,25 +290,32 @@ public class Community extends DSpaceObject
         List<Community> topCommunities = new ArrayList<Community>();
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community fromCache = (Community) context.fromCache(
-                    Community.class, row.getIntColumn("community_id"));
-
-            if (fromCache != null)
-            {
-                topCommunities.add(fromCache);
-            }
-            else
-            {
-                topCommunities.add(new Community(context, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community fromCache = (Community) context.fromCache(
+                        Community.class, row.getIntColumn("community_id"));
+
+                if (fromCache != null)
+                {
+                    topCommunities.add(fromCache);
+                }
+                else
+                {
+                    topCommunities.add(new Community(context, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Community[] communityArray = new Community[topCommunities.size()];
         communityArray = (Community[]) topCommunities.toArray(communityArray);
@@ -514,25 +528,32 @@ public class Community extends DSpaceObject
                 getID());
 
         // Make Collection objects
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Collection fromCache = (Collection) ourContext.fromCache(
-                    Collection.class, row.getIntColumn("collection_id"));
-
-            if (fromCache != null)
-            {
-                collections.add(fromCache);
-            }
-            else
-            {
-                collections.add(new Collection(ourContext, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Collection fromCache = (Collection) ourContext.fromCache(
+                        Collection.class, row.getIntColumn("collection_id"));
+
+                if (fromCache != null)
+                {
+                    collections.add(fromCache);
+                }
+                else
+                {
+                    collections.add(new Collection(ourContext, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Put them in an array
         Collection[] collectionArray = new Collection[collections.size()];
@@ -562,25 +583,32 @@ public class Community extends DSpaceObject
         // Make Community objects
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community fromCache = (Community) ourContext.fromCache(
-                    Community.class, row.getIntColumn("community_id"));
-
-            if (fromCache != null)
-            {
-                subcommunities.add(fromCache);
-            }
-            else
-            {
-                subcommunities.add(new Community(ourContext, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community fromCache = (Community) ourContext.fromCache(
+                        Community.class, row.getIntColumn("community_id"));
+
+                if (fromCache != null)
+                {
+                    subcommunities.add(fromCache);
+                }
+                else
+                {
+                    subcommunities.add(new Community(ourContext, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Put them in an array
         Community[] communityArray = new Community[subcommunities.size()];
@@ -608,25 +636,32 @@ public class Community extends DSpaceObject
                 getID());
 
         // Make Community object
-        if (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community fromCache = (Community) ourContext.fromCache(
-                    Community.class, row.getIntColumn("community_id"));
-
-            if (fromCache != null)
-            {
-                parentCommunity = fromCache;
-            }
-            else
-            {
-                parentCommunity = new Community(ourContext, row);
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community fromCache = (Community) ourContext.fromCache(
+                        Community.class, row.getIntColumn("community_id"));
+
+                if (fromCache != null)
+                {
+                    parentCommunity = fromCache;
+                }
+                else
+                {
+                    parentCommunity = new Community(ourContext, row);
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         return parentCommunity;
     }
@@ -693,22 +728,29 @@ public class Community extends DSpaceObject
                 "community2collection",
                 "SELECT * FROM community2collection WHERE " +
                 "community_id= ? AND collection_id= ? ",getID(),c.getID());
 
-        if (!tri.hasNext())
-        {
-            // No existing mapping, so add one
-            TableRow mappingRow = DatabaseManager.create(ourContext,
-                    "community2collection");
-
-            mappingRow.setColumn("community_id", getID());
-            mappingRow.setColumn("collection_id", c.getID());
-
-            ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COLLECTION, c.getID(), c.getHandle()));
-
-            DatabaseManager.update(ourContext, mappingRow);
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                // No existing mapping, so add one
+                TableRow mappingRow = DatabaseManager.create(ourContext,
+                        "community2collection");
+
+                mappingRow.setColumn("community_id", getID());
+                mappingRow.setColumn("collection_id", c.getID());
+
+                ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COLLECTION, c.getID(), c.getHandle()));
+
+                DatabaseManager.update(ourContext, mappingRow);
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
@@ -748,22 +790,29 @@ public class Community extends DSpaceObject
                 "community2community",
                 "SELECT * FROM community2community WHERE parent_comm_id= ? "+
                 "AND child_comm_id= ? ",getID(), c.getID());
 
-        if (!tri.hasNext())
-        {
-            // No existing mapping, so add one
-            TableRow mappingRow = DatabaseManager.create(ourContext,
-                    "community2community");
-
-            mappingRow.setColumn("parent_comm_id", getID());
-            mappingRow.setColumn("child_comm_id", c.getID());
-
-            ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COMMUNITY, c.getID(), c.getHandle()));
-
-            DatabaseManager.update(ourContext, mappingRow);
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                // No existing mapping, so add one
+                TableRow mappingRow = DatabaseManager.create(ourContext,
+                        "community2community");
+
+                mappingRow.setColumn("parent_comm_id", getID());
+                mappingRow.setColumn("child_comm_id", c.getID());
+
+                ourContext.addEvent(new Event(Event.ADD, Constants.COMMUNITY, getID(), Constants.COMMUNITY, c.getID(), c.getHandle()));
+
+                DatabaseManager.update(ourContext, mappingRow);
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
@@ -793,25 +842,32 @@ public class Community extends DSpaceObject
                 "SELECT * FROM community2collection WHERE collection_id= ? ",
                 c.getID());
 
-        if (!tri.hasNext())
-        {
-            //make the right to remove the collection explicit because the
-            // implicit relation
-            //has been removed. This only has to concern the currentUser
-            // because
-            //he started the removal process and he will end it too.
-            //also add right to remove from the collection to remove it's
-            // items.
-            AuthorizeManager.addPolicy(ourContext, c, Constants.DELETE,
-                    ourContext.getCurrentUser());
-            AuthorizeManager.addPolicy(ourContext, c, Constants.REMOVE,
-                    ourContext.getCurrentUser());
-
-            // Orphan; delete it
-            c.delete();
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                //make the right to remove the collection explicit because the
+                // implicit relation
+                //has been removed. This only has to concern the currentUser
+                // because
+                //he started the removal process and he will end it too.
+                //also add right to remove from the collection to remove it's
+                // items.
+                AuthorizeManager.addPolicy(ourContext, c, Constants.DELETE,
+                        ourContext.getCurrentUser());
+                AuthorizeManager.addPolicy(ourContext, c, Constants.REMOVE,
+                        ourContext.getCurrentUser());
+
+                // Orphan; delete it
+                c.delete();
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
@@ -841,25 +897,32 @@ public class Community extends DSpaceObject
                 "SELECT * FROM community2community WHERE child_comm_id= ? ",
                 c.getID());
 
-        if (!tri.hasNext())
-        {
-            //make the right to remove the sub explicit because the implicit
-            // relation
-            //has been removed. This only has to concern the currentUser
-            // because
-            //he started the removal process and he will end it too.
-            //also add right to remove from the subcommunity to remove it's
-            // children.
-            AuthorizeManager.addPolicy(ourContext, c, Constants.DELETE,
-                    ourContext.getCurrentUser());
-            AuthorizeManager.addPolicy(ourContext, c, Constants.REMOVE,
-                    ourContext.getCurrentUser());
-
-            // Orphan; delete it
-            c.delete();
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                //make the right to remove the sub explicit because the implicit
+                // relation
+                //has been removed. This only has to concern the currentUser
+                // because
+                //he started the removal process and he will end it too.
+                //also add right to remove from the subcommunity to remove it's
+                // children.
+                AuthorizeManager.addPolicy(ourContext, c, Constants.DELETE,
+                        ourContext.getCurrentUser());
+                AuthorizeManager.addPolicy(ourContext, c, Constants.REMOVE,
+                        ourContext.getCurrentUser());
+
+                // Orphan; delete it
+                c.delete();
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
/** /**

View File

@@ -105,17 +105,24 @@ public class FormatIdentifier
                 extension);
 
         BitstreamFormat retFormat = null;
 
-        if (tri.hasNext())
-        {
-            // Return first match
-            retFormat = new BitstreamFormat(context, tri.next());
-        }
-        else
-        {
-            retFormat = null;
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (tri.hasNext())
+            {
+                // Return first match
+                retFormat = new BitstreamFormat(context, tri.next());
+            }
+            else
+            {
+                retFormat = null;
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         return retFormat;
     }
 }

View File

@@ -143,39 +143,46 @@ public class Item extends DSpaceObject
         // Get Dublin Core metadata
         TableRowIterator tri = retrieveMetadata();
 
-        while (tri.hasNext())
-        {
-            TableRow resultRow = tri.next();
-
-            // Get the associated metadata field and schema information
-            int fieldID = resultRow.getIntColumn("metadata_field_id");
-            MetadataField field = MetadataField.find(context, fieldID);
-
-            if (field == null)
-            {
-                log.error("Loading item - cannot found metadata field "
-                        + fieldID);
-            }
-            else
-            {
-                MetadataSchema schema = MetadataSchema.find(
-                        context, field.getSchemaID());
-
-                // Make a DCValue object
-                DCValue dcv = new DCValue();
-                dcv.element = field.getElement();
-                dcv.qualifier = field.getQualifier();
-                dcv.value = resultRow.getStringColumn("text_value");
-                dcv.language = resultRow.getStringColumn("text_lang");
-                //dcv.namespace = schema.getNamespace();
-                dcv.schema = schema.getName();
-
-                // Add it to the list
-                dublinCore.add(dcv);
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow resultRow = tri.next();
+
+                // Get the associated metadata field and schema information
+                int fieldID = resultRow.getIntColumn("metadata_field_id");
+                MetadataField field = MetadataField.find(context, fieldID);
+
+                if (field == null)
+                {
+                    log.error("Loading item - cannot found metadata field "
+                            + fieldID);
+                }
+                else
+                {
+                    MetadataSchema schema = MetadataSchema.find(
+                            context, field.getSchemaID());
+
+                    // Make a DCValue object
+                    DCValue dcv = new DCValue();
+                    dcv.element = field.getElement();
+                    dcv.qualifier = field.getQualifier();
+                    dcv.value = resultRow.getStringColumn("text_value");
+                    dcv.language = resultRow.getStringColumn("text_lang");
+                    //dcv.namespace = schema.getNamespace();
+                    dcv.schema = schema.getName();
+
+                    // Add it to the list
+                    dublinCore.add(dcv);
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         // Get our Handle if any
         handle = HandleManager.findHandle(context, this);
@@ -918,25 +925,32 @@ public class Item extends DSpaceObject
                 "collection2item.item_id= ? ",
                 itemRow.getIntColumn("item_id"));
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Collection fromCache = (Collection) ourContext.fromCache(
-                    Collection.class, row.getIntColumn("collection_id"));
-
-            if (fromCache != null)
-            {
-                collections.add(fromCache);
-            }
-            else
-            {
-                collections.add(new Collection(ourContext, row));
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Collection fromCache = (Collection) ourContext.fromCache(
+                        Collection.class, row.getIntColumn("collection_id"));
+
+                if (fromCache != null)
+                {
+                    collections.add(fromCache);
+                }
+                else
+                {
+                    collections.add(new Collection(ourContext, row));
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Collection[] collectionArray = new Collection[collections.size()];
         collectionArray = (Collection[]) collections.toArray(collectionArray);
@@ -963,31 +977,38 @@ public class Item extends DSpaceObject
                 "AND community2item.item_id= ? ",
                 itemRow.getIntColumn("item_id"));
 
-        while (tri.hasNext())
-        {
-            TableRow row = tri.next();
-
-            // First check the cache
-            Community owner = (Community) ourContext.fromCache(Community.class,
-                    row.getIntColumn("community_id"));
-
-            if (owner == null)
-            {
-                owner = new Community(ourContext, row);
-            }
-
-            communities.add(owner);
-
-            // now add any parent communities
-            Community[] parents = owner.getAllParents();
-
-            for (int i = 0; i < parents.length; i++)
-            {
-                communities.add(parents[i]);
-            }
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            while (tri.hasNext())
+            {
+                TableRow row = tri.next();
+
+                // First check the cache
+                Community owner = (Community) ourContext.fromCache(Community.class,
+                        row.getIntColumn("community_id"));
+
+                if (owner == null)
+                {
+                    owner = new Community(ourContext, row);
+                }
+
+                communities.add(owner);
+
+                // now add any parent communities
+                Community[] parents = owner.getAllParents();
+
+                for (int i = 0; i < parents.length; i++)
+                {
+                    communities.add(parents[i]);
+                }
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
 
         Community[] communityArray = new Community[communities.size()];
         communityArray = (Community[]) communities.toArray(communityArray);
@@ -1012,25 +1033,32 @@ public class Item extends DSpaceObject
                     "item2bundle.item_id= ? ",
                     itemRow.getIntColumn("item_id"));
 
-            while (tri.hasNext())
-            {
-                TableRow r = tri.next();
-
-                // First check the cache
-                Bundle fromCache = (Bundle) ourContext.fromCache(Bundle.class,
-                        r.getIntColumn("bundle_id"));
-
-                if (fromCache != null)
-                {
-                    bundles.add(fromCache);
-                }
-                else
-                {
-                    bundles.add(new Bundle(ourContext, r));
-                }
-            }
-
-            // close the TableRowIterator to free up resources
-            tri.close();
+            try
+            {
+                while (tri.hasNext())
+                {
+                    TableRow r = tri.next();
+
+                    // First check the cache
+                    Bundle fromCache = (Bundle) ourContext.fromCache(Bundle.class,
+                            r.getIntColumn("bundle_id"));
+
+                    if (fromCache != null)
+                    {
+                        bundles.add(fromCache);
+                    }
+                    else
+                    {
+                        bundles.add(new Bundle(ourContext, r));
+                    }
+                }
+            }
+            finally
+            {
+                // close the TableRowIterator to free up resources
+                if (tri != null)
+                    tri.close();
+            }
         }
 
         Bundle[] bundleArray = new Bundle[bundles.size()];
@@ -1184,25 +1212,32 @@ public class Item extends DSpaceObject
                 "SELECT * FROM item2bundle WHERE bundle_id= ? ",
                 b.getID());
 
-        if (!tri.hasNext())
-        {
-            //make the right to remove the bundle explicit because the implicit
-            // relation
-            //has been removed. This only has to concern the currentUser
-            // because
-            //he started the removal process and he will end it too.
-            //also add right to remove from the bundle to remove it's
-            // bitstreams.
-            AuthorizeManager.addPolicy(ourContext, b, Constants.DELETE,
-                    ourContext.getCurrentUser());
-            AuthorizeManager.addPolicy(ourContext, b, Constants.REMOVE,
-                    ourContext.getCurrentUser());
-
-            // The bundle is an orphan, delete it
-            b.delete();
-        }
-
-        // close the TableRowIterator to free up resources
-        tri.close();
+        try
+        {
+            if (!tri.hasNext())
+            {
+                //make the right to remove the bundle explicit because the implicit
+                // relation
+                //has been removed. This only has to concern the currentUser
+                // because
+                //he started the removal process and he will end it too.
+                //also add right to remove from the bundle to remove it's
+                // bitstreams.
+                AuthorizeManager.addPolicy(ourContext, b, Constants.DELETE,
+                        ourContext.getCurrentUser());
+                AuthorizeManager.addPolicy(ourContext, b, Constants.REMOVE,
+                        ourContext.getCurrentUser());
+
+                // The bundle is an orphan, delete it
+                b.delete();
+            }
+        }
+        finally
+        {
+            // close the TableRowIterator to free up resources
+            if (tri != null)
+                tri.close();
+        }
     }
 
     /**
/** /**

View File

@@ -320,15 +320,20 @@ public class MetadataField
schemaID, element, qualifier); schemaID, element, qualifier);
} }
TableRow row = null; TableRow row = null;
if (tri.hasNext()) try
{ {
row = tri.next(); if (tri.hasNext())
{
row = tri.next();
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
if (row == null) if (row == null)
{ {
@@ -354,14 +359,21 @@ public class MetadataField
// Get all the metadatafieldregistry rows // Get all the metadatafieldregistry rows
TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataFieldRegistry", TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataFieldRegistry",
"SELECT mfr.* FROM MetadataFieldRegistry mfr, MetadataSchemaRegistry msr where mfr.metadata_schema_id= msr.metadata_schema_id ORDER BY msr.short_id, mfr.element, mfr.qualifier"); "SELECT mfr.* FROM MetadataFieldRegistry mfr, MetadataSchemaRegistry msr where mfr.metadata_schema_id= msr.metadata_schema_id ORDER BY msr.short_id, mfr.element, mfr.qualifier");
// Make into DC Type objects try
while (tri.hasNext())
{ {
fields.add(new MetadataField(tri.next())); // Make into DC Type objects
while (tri.hasNext())
{
fields.add(new MetadataField(tri.next()));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
// Convert list into an array // Convert list into an array
MetadataField[] typeArray = new MetadataField[fields.size()]; MetadataField[] typeArray = new MetadataField[fields.size()];
@@ -386,13 +398,20 @@ public class MetadataField
"SELECT * FROM MetadataFieldRegistry WHERE metadata_schema_id= ? " + "SELECT * FROM MetadataFieldRegistry WHERE metadata_schema_id= ? " +
" ORDER BY element, qualifier", schemaID); " ORDER BY element, qualifier", schemaID);
// Make into DC Type objects try
while (tri.hasNext())
{ {
fields.add(new MetadataField(tri.next())); // Make into DC Type objects
while (tri.hasNext())
{
fields.add(new MetadataField(tri.next()));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
// Convert list into an array // Convert list into an array
MetadataField[] typeArray = new MetadataField[fields.size()]; MetadataField[] typeArray = new MetadataField[fields.size()];
@@ -509,41 +528,60 @@ public class MetadataField
String qualifier) throws IOException, SQLException, String qualifier) throws IOException, SQLException,
AuthorizeException AuthorizeException
{ {
Connection con = context.getDBConnection();
TableRow reg = DatabaseManager.row("MetadataFieldRegistry");
String qualifierClause = "";
if (qualifier == null)
{
qualifierClause = "and qualifier is null";
}
else
{
qualifierClause = "and qualifier = ?";
}
String query = "SELECT COUNT(*) FROM " + reg.getTable()
+ " WHERE metadata_schema_id= ? "
+ " and metadata_field_id != ? "
+ " and element= ? " + qualifierClause;
PreparedStatement statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setInt(2,fieldID);
statement.setString(3,element);
if (qualifier != null)
{
statement.setString(4,qualifier);
}
ResultSet rs = statement.executeQuery();
int count = 0; int count = 0;
if (rs.next()) Connection con = null;
PreparedStatement statement = null;
ResultSet rs = null;
try
{ {
count = rs.getInt(1); con = context.getDBConnection();
TableRow reg = DatabaseManager.row("MetadataFieldRegistry");
String qualifierClause = "";
if (qualifier == null)
{
qualifierClause = "and qualifier is null";
}
else
{
qualifierClause = "and qualifier = ?";
}
String query = "SELECT COUNT(*) FROM " + reg.getTable()
+ " WHERE metadata_schema_id= ? "
+ " and metadata_field_id != ? "
+ " and element= ? " + qualifierClause;
statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setInt(2,fieldID);
statement.setString(3,element);
if (qualifier != null)
{
statement.setString(4,qualifier);
}
rs = statement.executeQuery();
if (rs.next())
{
count = rs.getInt(1);
}
}
finally
{
if (rs != null)
{
try { rs.close(); } catch (SQLException sqle) { }
}
if (statement != null)
{
try { statement.close(); } catch (SQLException sqle) { }
}
} }
return (count == 0); return (count == 0);
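The uniqueness check now follows the plain JDBC form of the same discipline; a minimal sketch with an illustrative query, closing the ResultSet and PreparedStatement independently so one failed close cannot leak the other:

    import java.sql.Connection;
    import java.sql.PreparedStatement;
    import java.sql.ResultSet;
    import java.sql.SQLException;

    public class UniquenessCheck
    {
        /** True when no other row uses the same element; statement and result set are always closed. */
        public static boolean isUnique(Connection con, int fieldID, String element)
            throws SQLException
        {
            int count = 0;
            PreparedStatement statement = null;
            ResultSet rs = null;
            try
            {
                statement = con.prepareStatement(
                    "SELECT COUNT(*) FROM MetadataFieldRegistry WHERE metadata_field_id != ? AND element = ?");
                statement.setInt(1, fieldID);
                statement.setString(2, element);
                rs = statement.executeQuery();
                if (rs.next())
                {
                    count = rs.getInt(1);
                }
            }
            finally
            {
                // each close is guarded so a failure in one does not prevent the other
                if (rs != null)
                {
                    try { rs.close(); } catch (SQLException sqle) { }
                }
                if (statement != null)
                {
                    try { statement.close(); } catch (SQLException sqle) { }
                }
            }
            return count == 0;
        }
    }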
@@ -604,21 +642,36 @@ public class MetadataField
{ {
if (id2field != null) if (id2field != null)
return; return;
id2field = new HashMap();
log.info("Loading MetadataField elements into cache.");
// Grab rows from DB synchronized (MetadataField.class)
TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
"SELECT * from MetadataFieldRegistry");
while (tri.hasNext())
{ {
TableRow row = tri.next(); if (id2field == null)
int fieldID = row.getIntColumn("metadata_field_id"); {
id2field.put(new Integer(fieldID), new MetadataField(row)); HashMap new_id2field = new HashMap();
} log.info("Loading MetadataField elements into cache.");
// close the TableRowIterator to free up resources // Grab rows from DB
tri.close(); TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataFieldRegistry",
"SELECT * from MetadataFieldRegistry");
try
{
while (tri.hasNext())
{
TableRow row = tri.next();
int fieldID = row.getIntColumn("metadata_field_id");
new_id2field.put(new Integer(fieldID), new MetadataField(row));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
id2field = new_id2field;
}
}
} }
} }
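The cache-loading hunk above synchronizes on the class, re-checks the field, and fills a local map that is only assigned to the shared field once complete. A generic sketch of that shape (field and value types illustrative):

    import java.util.HashMap;
    import java.util.Map;

    public class LazyCache
    {
        private static Map<Integer, String> id2name = null;

        public static String lookup(int id)
        {
            if (id2name == null)
            {
                synchronized (LazyCache.class)
                {
                    if (id2name == null)
                    {
                        // populate a local map first so other threads never see a half-built cache
                        Map<Integer, String> fresh = new HashMap<Integer, String>();
                        fresh.put(Integer.valueOf(id), "example");   // stand-in for the DB load
                        id2name = fresh;
                    }
                }
            }
            return id2name.get(Integer.valueOf(id));
        }
    }

Strictly, guaranteed-safe publication would also require the shared field to be volatile; the sketch mirrors the commit, which leaves it a plain static field.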
View File
@@ -262,13 +262,19 @@ public class MetadataSchema
namespace); namespace);
TableRow row = null; TableRow row = null;
if (tri.hasNext()) try
{ {
row = tri.next(); if (tri.hasNext())
{
row = tri.next();
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
if (row == null) if (row == null)
{ {
@@ -360,13 +366,20 @@ public class MetadataSchema
TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataSchemaRegistry", TableRowIterator tri = DatabaseManager.queryTable(context, "MetadataSchemaRegistry",
"SELECT * FROM MetadataSchemaRegistry ORDER BY metadata_schema_id"); "SELECT * FROM MetadataSchemaRegistry ORDER BY metadata_schema_id");
// Make into DC Type objects try
while (tri.hasNext())
{ {
schemas.add(new MetadataSchema(tri.next())); // Make into DC Type objects
while (tri.hasNext())
{
schemas.add(new MetadataSchema(tri.next()));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
// Convert list into an array // Convert list into an array
MetadataSchema[] typeArray = new MetadataSchema[schemas.size()]; MetadataSchema[] typeArray = new MetadataSchema[schemas.size()];
@@ -385,22 +398,41 @@ public class MetadataSchema
private boolean uniqueNamespace(Context context, String namespace) private boolean uniqueNamespace(Context context, String namespace)
throws SQLException throws SQLException
{ {
Connection con = context.getDBConnection();
TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
"WHERE metadata_schema_id != ? " +
"AND namespace= ? ";
PreparedStatement statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setString(2,namespace);
ResultSet rs = statement.executeQuery();
int count = 0; int count = 0;
if (rs.next()) Connection con = context.getDBConnection();
PreparedStatement statement = null;
ResultSet rs = null;
try
{ {
count = rs.getInt(1); TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
"WHERE metadata_schema_id != ? " +
"AND namespace= ? ";
statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setString(2,namespace);
rs = statement.executeQuery();
if (rs.next())
{
count = rs.getInt(1);
}
}
finally
{
if (rs != null)
{
try { rs.close(); } catch (SQLException sqle) { }
}
if (statement != null)
{
try { statement.close(); } catch (SQLException sqle) { }
}
} }
return (count == 0); return (count == 0);
@@ -417,23 +449,41 @@ public class MetadataSchema
private boolean uniqueShortName(Context context, String name) private boolean uniqueShortName(Context context, String name)
throws SQLException throws SQLException
{ {
Connection con = context.getDBConnection();
TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
"WHERE metadata_schema_id != ? " +
"AND short_id = ? ";
PreparedStatement statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setString(2,name);
ResultSet rs = statement.executeQuery();
int count = 0; int count = 0;
if (rs.next()) Connection con = context.getDBConnection();
PreparedStatement statement = null;
ResultSet rs = null;
try
{ {
count = rs.getInt(1); TableRow reg = DatabaseManager.row("MetadataSchemaRegistry");
String query = "SELECT COUNT(*) FROM " + reg.getTable() + " " +
"WHERE metadata_schema_id != ? " +
"AND short_id = ? ";
statement = con.prepareStatement(query);
statement.setInt(1,schemaID);
statement.setString(2,name);
rs = statement.executeQuery();
if (rs.next())
{
count = rs.getInt(1);
}
}
finally
{
if (rs != null)
{
try { rs.close(); } catch (SQLException sqle) { }
}
if (statement != null)
{
try { statement.close(); } catch (SQLException sqle) { }
}
} }
return (count == 0); return (count == 0);
@@ -501,21 +551,38 @@ public class MetadataSchema
if (id2schema != null && name2schema != null) if (id2schema != null && name2schema != null)
return; return;
log.info("Loading schema cache for fast finds"); synchronized (MetadataSchema.class)
id2schema = new HashMap();
name2schema = new HashMap();
TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataSchemaRegistry",
"SELECT * from MetadataSchemaRegistry");
while (tri.hasNext())
{ {
TableRow row = tri.next(); if (id2schema == null && name2schema == null)
{
log.info("Loading schema cache for fast finds");
HashMap new_id2schema = new HashMap();
HashMap new_name2schema = new HashMap();
MetadataSchema s = new MetadataSchema(row); TableRowIterator tri = DatabaseManager.queryTable(context,"MetadataSchemaRegistry",
id2schema.put(new Integer(s.schemaID), s); "SELECT * from MetadataSchemaRegistry");
name2schema.put(s.name, s);
try
{
while (tri.hasNext())
{
TableRow row = tri.next();
MetadataSchema s = new MetadataSchema(row);
new_id2schema.put(new Integer(s.schemaID), s);
new_name2schema.put(s.name, s);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
id2schema = new_id2schema;
name2schema = new_name2schema;
}
} }
// close the TableRowIterator to free up resources
tri.close();
} }
} }
View File
@@ -278,13 +278,19 @@ public class MetadataValue
valueId); valueId);
TableRow row = null; TableRow row = null;
if (tri.hasNext()) try
{ {
row = tri.next(); if (tri.hasNext())
{
row = tri.next();
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
if (row == null) if (row == null)
{ {
@@ -316,14 +322,20 @@ public class MetadataValue
TableRow row = null; TableRow row = null;
java.util.Collection ret = new ArrayList(); java.util.Collection ret = new ArrayList();
while (tri.hasNext()) try
{ {
row = tri.next(); while (tri.hasNext())
ret.add(new MetadataValue(row)); {
row = tri.next();
ret.add(new MetadataValue(row));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
return ret; return ret;
} }
View File
@@ -121,16 +121,23 @@ public class SupervisedItem extends WorkspaceItem
TableRowIterator tri = DatabaseManager.queryTable(context, TableRowIterator tri = DatabaseManager.queryTable(context,
"workspaceitem", "workspaceitem",
query); query);
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
SupervisedItem si = new SupervisedItem(context, row); {
TableRow row = tri.next();
sItems.add(si); SupervisedItem si = new SupervisedItem(context, row);
sItems.add(si);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
SupervisedItem[] siArray = new SupervisedItem[sItems.size()]; SupervisedItem[] siArray = new SupervisedItem[sItems.size()];
siArray = (SupervisedItem[]) sItems.toArray(siArray); siArray = (SupervisedItem[]) sItems.toArray(siArray);
@@ -159,16 +166,23 @@ public class SupervisedItem extends WorkspaceItem
"ORDER BY epersongroup.name"; "ORDER BY epersongroup.name";
TableRowIterator tri = DatabaseManager.queryTable(c,"epersongroup",query, wi); TableRowIterator tri = DatabaseManager.queryTable(c,"epersongroup",query, wi);
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
Group group = Group.find(c,row.getIntColumn("eperson_group_id")); {
TableRow row = tri.next();
groupList.add(group); Group group = Group.find(c,row.getIntColumn("eperson_group_id"));
groupList.add(group);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
Group[] groupArray = new Group[groupList.size()]; Group[] groupArray = new Group[groupList.size()];
groupArray = (Group[]) groupList.toArray(groupArray); groupArray = (Group[]) groupList.toArray(groupArray);
@@ -201,17 +215,24 @@ public class SupervisedItem extends WorkspaceItem
TableRowIterator tri = DatabaseManager.queryTable(ourContext, TableRowIterator tri = DatabaseManager.queryTable(ourContext,
"epersongroup", "epersongroup",
query, this.getID()); query, this.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
Group group = Group.find(ourContext, {
row.getIntColumn("eperson_group_id")); TableRow row = tri.next();
Group group = Group.find(ourContext,
groupList.add(group); row.getIntColumn("eperson_group_id"));
groupList.add(group);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
Group[] groupArray = new Group[groupList.size()]; Group[] groupArray = new Group[groupList.size()];
groupArray = (Group[]) groupList.toArray(groupArray); groupArray = (Group[]) groupList.toArray(groupArray);
@@ -244,15 +265,22 @@ public class SupervisedItem extends WorkspaceItem
TableRowIterator tri = DatabaseManager.queryTable(context, TableRowIterator tri = DatabaseManager.queryTable(context,
"workspaceitem", "workspaceitem",
query,ep.getID()); query,ep.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
SupervisedItem si = new SupervisedItem(context, row); {
sItems.add(si); TableRow row = tri.next();
SupervisedItem si = new SupervisedItem(context, row);
sItems.add(si);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
SupervisedItem[] siArray = new SupervisedItem[sItems.size()]; SupervisedItem[] siArray = new SupervisedItem[sItems.size()];
siArray = (SupervisedItem[]) sItems.toArray(siArray); siArray = (SupervisedItem[]) sItems.toArray(siArray);
View File
@@ -312,23 +312,30 @@ public class WorkspaceItem implements InProgressSubmission
"ORDER BY workspaceitem.workspace_item_id", "ORDER BY workspaceitem.workspace_item_id",
ep.getID()); ep.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
// Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
if (wi == null)
{ {
wi = new WorkspaceItem(context, row); TableRow row = tri.next();
}
wsItems.add(wi); // Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
if (wi == null)
{
wi = new WorkspaceItem(context, row);
}
wsItems.add(wi);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()]; WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()];
wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray); wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray);
@@ -356,24 +363,31 @@ public class WorkspaceItem implements InProgressSubmission
"workspaceitem.collection_id= ? ", "workspaceitem.collection_id= ? ",
c.getID()); c.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
// Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
// not in cache? turn row into workspaceitem
if (wi == null)
{ {
wi = new WorkspaceItem(context, row); TableRow row = tri.next();
}
wsItems.add(wi); // Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
// not in cache? turn row into workspaceitem
if (wi == null)
{
wi = new WorkspaceItem(context, row);
}
wsItems.add(wi);
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
// close the TableRowIterator to free up resources
tri.close();
WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()]; WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()];
wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray); wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray);
@@ -397,24 +411,31 @@ public class WorkspaceItem implements InProgressSubmission
"workspaceitem", "workspaceitem",
query); query);
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
// Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
// not in cache? turn row into workspaceitem
if (wi == null)
{ {
wi = new WorkspaceItem(context, row); TableRow row = tri.next();
// Check the cache
WorkspaceItem wi = (WorkspaceItem) context.fromCache(
WorkspaceItem.class, row.getIntColumn("workspace_item_id"));
// not in cache? turn row into workspaceitem
if (wi == null)
{
wi = new WorkspaceItem(context, row);
}
wsItems.add(wi);
} }
wsItems.add(wi);
} }
finally
tri.close(); {
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()]; WorkspaceItem[] wsArray = new WorkspaceItem[wsItems.size()];
wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray); wsArray = (WorkspaceItem[]) wsItems.toArray(wsArray);
View File
@@ -260,9 +260,11 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
File.separator + "config" + File.separator; File.separator + "config" + File.separator;
File propsFile = new File(parent, propsFilename); File propsFile = new File(parent, propsFilename);
Properties modsConfig = new Properties(); Properties modsConfig = new Properties();
FileInputStream pfs = null;
try try
{ {
modsConfig.load(new FileInputStream(propsFile)); pfs = new FileInputStream(propsFile);
modsConfig.load(pfs);
} }
catch (IOException e) catch (IOException e)
{ {
@@ -270,6 +272,12 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
throw new CrosswalkInternalException("MODS crosswalk cannot "+ throw new CrosswalkInternalException("MODS crosswalk cannot "+
"open config file: "+e.toString()); "open config file: "+e.toString());
} }
finally
{
if (pfs != null)
try { pfs.close(); } catch (IOException ioe) { }
}
modsMap = new HashMap(); modsMap = new HashMap();
Enumeration pe = modsConfig.propertyNames(); Enumeration pe = modsConfig.propertyNames();
while (pe.hasMoreElements()) while (pe.hasMoreElements())
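The crosswalk classes (and ConfigurationManager later in this commit) all receive the same fix for properties loading: keep a reference to the stream so it can be closed in finally. As a standalone sketch (path handling illustrative):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class ConfigLoader
    {
        /** Load a properties file, guaranteeing the stream is closed even if load() fails. */
        public static Properties load(String path) throws IOException
        {
            Properties props = new Properties();
            FileInputStream fis = null;
            try
            {
                fis = new FileInputStream(path);
                props.load(fis);
            }
            finally
            {
                if (fis != null)
                {
                    try { fis.close(); } catch (IOException ioe) { }
                }
            }
            return props;
        }
    }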
View File
@@ -280,7 +280,17 @@ public class QDCCrosswalk extends SelfNamedPlugin
File.separator + "config" + File.separator; File.separator + "config" + File.separator;
File propsFile = new File(parent, propsFilename); File propsFile = new File(parent, propsFilename);
Properties qdcProps = new Properties(); Properties qdcProps = new Properties();
qdcProps.load(new FileInputStream(propsFile)); FileInputStream pfs = null;
try
{
pfs = new FileInputStream(propsFile);
qdcProps.load(pfs);
}
finally
{
if (pfs != null)
try { pfs.close(); } catch (IOException ioe) { }
}
// grovel properties to initialize qdc->element and element->qdc maps. // grovel properties to initialize qdc->element and element->qdc maps.
// evaluate the XML fragment with a wrapper including namespaces. // evaluate the XML fragment with a wrapper including namespaces.
View File
@@ -120,7 +120,16 @@ public class XHTMLHeadDisseminationCrosswalk extends SelfNamedPlugin implements
// Read in configuration // Read in configuration
Properties crosswalkProps = new Properties(); Properties crosswalkProps = new Properties();
crosswalkProps.load(new FileInputStream(config)); FileInputStream fis = new FileInputStream(config);
try
{
crosswalkProps.load(fis);
}
finally
{
if (fis != null)
try { fis.close(); } catch (IOException ioe) { }
}
Enumeration e = crosswalkProps.keys(); Enumeration e = crosswalkProps.keys();
while (e.hasMoreElements()) while (e.hasMoreElements())
View File
@@ -177,9 +177,12 @@ public class ConfigurationManager
{ {
// Load in default license // Load in default license
FileReader fr = null;
BufferedReader br = null;
try try
{ {
BufferedReader br = new BufferedReader(new FileReader(licenseFile)); fr = new FileReader(licenseFile);
br = new BufferedReader(fr);
String lineIn; String lineIn;
license = ""; license = "";
while ((lineIn = br.readLine()) != null) while ((lineIn = br.readLine()) != null)
@@ -195,6 +198,15 @@ public class ConfigurationManager
// configuration we can't do anything // configuration we can't do anything
System.exit(1); System.exit(1);
} }
finally
{
if (br != null)
try { br.close(); } catch (IOException ioe) { }
if (fr != null)
try { fr.close(); } catch (IOException ioe) { }
}
return license; return license;
} }
@@ -480,7 +492,8 @@ public class ConfigurationManager
protected static File getConfigurationFile() protected static File getConfigurationFile()
{ {
// in case it hasn't been done yet. // in case it hasn't been done yet.
loadConfig(null); if (loadedFile == null)
loadConfig(null);
return loadedFile; return loadedFile;
} }
@@ -494,7 +507,7 @@ public class ConfigurationManager
* The <code>dspace.cfg</code> configuration file to use, or * The <code>dspace.cfg</code> configuration file to use, or
* <code>null</code> to try default locations * <code>null</code> to try default locations
*/ */
public static void loadConfig(String configFile) public static synchronized void loadConfig(String configFile)
{ {
if (properties != null) if (properties != null)
@@ -505,6 +518,7 @@ public class ConfigurationManager
URL url = null; URL url = null;
InputStream is = null;
try try
{ {
String configProperty = null; String configProperty = null;
@@ -556,7 +570,8 @@ public class ConfigurationManager
else else
{ {
properties = new Properties(); properties = new Properties();
properties.load(url.openStream()); is = url.openStream();
properties.load(is);
// walk values, interpolating any embedded references. // walk values, interpolating any embedded references.
for (Enumeration pe = properties.propertyNames(); pe.hasMoreElements(); ) for (Enumeration pe = properties.propertyNames(); pe.hasMoreElements(); )
@@ -577,16 +592,25 @@ public class ConfigurationManager
// configuration we can't do anything // configuration we can't do anything
throw new RuntimeException("Cannot load configuration: " + url, e); throw new RuntimeException("Cannot load configuration: " + url, e);
} }
finally
{
if (is != null)
try { is.close(); } catch (IOException ioe) { }
}
// Load in default license // Load in default license
File licenseFile = new File(getProperty("dspace.dir") + File.separator File licenseFile = new File(getProperty("dspace.dir") + File.separator
+ "config" + File.separator + "default.license"); + "config" + File.separator + "default.license");
FileInputStream fir = null;
InputStreamReader ir = null;
BufferedReader br = null;
try try
{ {
FileInputStream fir = new FileInputStream(licenseFile); fir = new FileInputStream(licenseFile);
InputStreamReader ir = new InputStreamReader(fir, "UTF-8"); ir = new InputStreamReader(fir, "UTF-8");
BufferedReader br = new BufferedReader(ir); br = new BufferedReader(ir);
String lineIn; String lineIn;
license = ""; license = "";
@@ -605,7 +629,18 @@ public class ConfigurationManager
// FIXME: Maybe something more graceful here, but with the // FIXME: Maybe something more graceful here, but with the
// configuration we can't do anything // configuration we can't do anything
throw new RuntimeException("Cannot load license: " + licenseFile.toString(),e); throw new RuntimeException("Cannot load license: " + licenseFile.toString(),e);
} }
finally
{
if (br != null)
try { br.close(); } catch (IOException ioe) { }
if (ir != null)
try { ir.close(); } catch (IOException ioe) { }
if (fir != null)
try { fir.close(); } catch (IOException ioe) { }
}
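getConfigurationFile() now only triggers a load when nothing is loaded yet, and loadConfig() is synchronized so two threads cannot initialize concurrently. Reduced to its essentials (property names and values are stand-ins, not the real dspace.cfg handling):

    import java.util.Properties;

    public class LazyConfig
    {
        private static Properties properties = null;

        public static String getProperty(String key)
        {
            if (properties == null)
            {
                loadConfig();
            }
            return properties.getProperty(key);
        }

        /** Synchronized so concurrent callers cannot both run the load; the field is re-checked inside. */
        public static synchronized void loadConfig()
        {
            if (properties != null)
            {
                return;
            }
            Properties p = new Properties();
            p.setProperty("dspace.dir", "/dspace");   // stand-in for reading the real configuration
            properties = p;
        }
    }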
View File
@@ -84,9 +84,8 @@ public class LogManager
contextExtraInfo = "no_context"; contextExtraInfo = "no_context";
} }
String result = new String(email + ":" + contextExtraInfo + ":" StringBuilder result = new StringBuilder();
+ action + ":" + extrainfo); result.append(email).append(":").append(contextExtraInfo).append(":").append(action).append(":").append(extrainfo);
return result.toString();
return result;
} }
} }
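The LogManager change replaces chained String concatenation with a StringBuilder; the resulting method shape, in isolation:

    public class HeaderBuilder
    {
        public static String getHeader(String email, String contextExtraInfo,
                                        String action, String extrainfo)
        {
            // append the pieces instead of building intermediate String objects
            StringBuilder result = new StringBuilder();
            result.append(email).append(":").append(contextExtraInfo)
                  .append(":").append(action).append(":").append(extrainfo);
            return result.toString();
        }
    }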
View File
@@ -564,6 +564,9 @@ public class PluginManager
public static void checkConfiguration() public static void checkConfiguration()
throws IOException throws IOException
{ {
FileReader fr = null;
BufferedReader cr = null;
/* XXX TODO: (maybe) test that implementation class is really a /* XXX TODO: (maybe) test that implementation class is really a
* subclass or impl of the plugin "interface" * subclass or impl of the plugin "interface"
*/ */
@@ -574,51 +577,63 @@ public class PluginManager
Map namedKey = new HashMap(); Map namedKey = new HashMap();
Map selfnamedKey = new HashMap(); Map selfnamedKey = new HashMap();
Map reusableKey = new HashMap(); Map reusableKey = new HashMap();
HashMap keyMap = new HashMap();
// 1. First pass -- grovel the actual config file to check for // 1. First pass -- grovel the actual config file to check for
// duplicate keys, since Properties class hides them from us. // duplicate keys, since Properties class hides them from us.
// Also build lists of each type of key, check for misspellings. // Also build lists of each type of key, check for misspellings.
File config = ConfigurationManager.getConfigurationFile(); File config = ConfigurationManager.getConfigurationFile();
BufferedReader cr = new BufferedReader(new FileReader(config)); try
String line = null;
boolean continued = false;
HashMap keyMap = new HashMap();
Pattern keyPattern = Pattern.compile("([^\\s\\=\\:]+)");
while ((line = cr.readLine()) != null)
{ {
line = line.trim(); fr = new FileReader(config);
if (line.startsWith("!") || line.startsWith("#")) cr = new BufferedReader(fr);
continued = false; String line = null;
else boolean continued = false;
Pattern keyPattern = Pattern.compile("([^\\s\\=\\:]+)");
while ((line = cr.readLine()) != null)
{ {
if (!continued && line.startsWith("plugin.")) line = line.trim();
if (line.startsWith("!") || line.startsWith("#"))
continued = false;
else
{ {
Matcher km = keyPattern.matcher(line); if (!continued && line.startsWith("plugin."))
if (km.find())
{ {
String key = line.substring(0, km.end(1)); Matcher km = keyPattern.matcher(line);
if (keyMap.containsKey(key)) if (km.find())
log.error("Duplicate key \""+key+"\" in DSpace configuration file="+config.toString()); {
else String key = line.substring(0, km.end(1));
keyMap.put(key, key); if (keyMap.containsKey(key))
log.error("Duplicate key \""+key+"\" in DSpace configuration file="+config.toString());
else
keyMap.put(key, key);
if (key.startsWith(SINGLE_PREFIX)) if (key.startsWith(SINGLE_PREFIX))
singleKey.put(key.substring(SINGLE_PREFIX.length()), key); singleKey.put(key.substring(SINGLE_PREFIX.length()), key);
else if (key.startsWith(SEQUENCE_PREFIX)) else if (key.startsWith(SEQUENCE_PREFIX))
sequenceKey.put(key.substring(SEQUENCE_PREFIX.length()), key); sequenceKey.put(key.substring(SEQUENCE_PREFIX.length()), key);
else if (key.startsWith(NAMED_PREFIX)) else if (key.startsWith(NAMED_PREFIX))
namedKey.put(key.substring(NAMED_PREFIX.length()), key); namedKey.put(key.substring(NAMED_PREFIX.length()), key);
else if (key.startsWith(SELFNAMED_PREFIX)) else if (key.startsWith(SELFNAMED_PREFIX))
selfnamedKey.put(key.substring(SELFNAMED_PREFIX.length()), key); selfnamedKey.put(key.substring(SELFNAMED_PREFIX.length()), key);
else if (key.startsWith(REUSABLE_PREFIX)) else if (key.startsWith(REUSABLE_PREFIX))
reusableKey.put(key.substring(REUSABLE_PREFIX.length()), key); reusableKey.put(key.substring(REUSABLE_PREFIX.length()), key);
else else
log.error("Key with unknown prefix \""+key+"\" in DSpace configuration file="+config.toString()); log.error("Key with unknown prefix \""+key+"\" in DSpace configuration file="+config.toString());
}
} }
continued = line.length() > 0 && line.charAt(line.length()-1) == '\\';
} }
continued = line.length() > 0 && line.charAt(line.length()-1) == '\\';
} }
} }
finally
{
if (cr != null)
try { cr.close(); } catch (IOException ioe) { }
if (fr != null)
try { fr.close(); } catch (IOException ioe) { }
}
// 1.1 Sanity check, make sure keyMap == set of keys from Configuration // 1.1 Sanity check, make sure keyMap == set of keys from Configuration
Enumeration pne = ConfigurationManager.propertyNames(); Enumeration pne = ConfigurationManager.propertyNames();

View File
"SELECT * from item where submitter_id= ? ", "SELECT * from item where submitter_id= ? ",
getID()); getID());
if (tri.hasNext()) try
{ {
tableList.add("item"); if (tri.hasNext())
{
tableList.add("item");
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
// check for eperson in workflowitem table // check for eperson in workflowitem table
tri = DatabaseManager.query(myContext, tri = DatabaseManager.query(myContext,
"SELECT * from workflowitem where owner= ? ", "SELECT * from workflowitem where owner= ? ",
getID()); getID());
if (tri.hasNext()) try
{ {
tableList.add("workflowitem"); if (tri.hasNext())
{
tableList.add("workflowitem");
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
// check for eperson in tasklistitem table // check for eperson in tasklistitem table
tri = DatabaseManager.query(myContext, tri = DatabaseManager.query(myContext,
"SELECT * from tasklistitem where eperson_id= ? ", "SELECT * from tasklistitem where eperson_id= ? ",
getID()); getID());
if (tri.hasNext()) try
{ {
tableList.add("tasklistitem"); if (tri.hasNext())
{
tableList.add("tasklistitem");
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
// the list of tables can be used to construct an error message // the list of tables can be used to construct an error message
// explaining to the user why the eperson cannot be deleted. // explaining to the user why the eperson cannot be deleted.

View File
"epersongroup2eperson.eperson_group_id= ?", "epersongroup2eperson.eperson_group_id= ?",
myRow.getIntColumn("eperson_group_id")); myRow.getIntColumn("eperson_group_id"));
while (tri.hasNext()) try
{ {
TableRow r = (TableRow) tri.next(); while (tri.hasNext())
// First check the cache
EPerson fromCache = (EPerson) myContext.fromCache(
EPerson.class, r.getIntColumn("eperson_id"));
if (fromCache != null)
{ {
epeople.add(fromCache); TableRow r = (TableRow) tri.next();
}
else // First check the cache
{ EPerson fromCache = (EPerson) myContext.fromCache(
epeople.add(new EPerson(myContext, r)); EPerson.class, r.getIntColumn("eperson_id"));
if (fromCache != null)
{
epeople.add(fromCache);
}
else
{
epeople.add(new EPerson(myContext, r));
}
} }
} }
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
tri.close();
// now get Group objects // now get Group objects
tri = DatabaseManager.queryTable(myContext,"epersongroup", tri = DatabaseManager.queryTable(myContext,"epersongroup",
"SELECT epersongroup.* FROM epersongroup, group2group WHERE " + "SELECT epersongroup.* FROM epersongroup, group2group WHERE " +
@@ -168,25 +175,32 @@ public class Group extends DSpaceObject
"group2group.parent_id= ? ", "group2group.parent_id= ? ",
myRow.getIntColumn("eperson_group_id")); myRow.getIntColumn("eperson_group_id"));
while (tri.hasNext()) try
{ {
TableRow r = (TableRow) tri.next(); while (tri.hasNext())
// First check the cache
Group fromCache = (Group) myContext.fromCache(Group.class,
r.getIntColumn("eperson_group_id"));
if (fromCache != null)
{ {
groups.add(fromCache); TableRow r = (TableRow) tri.next();
}
else // First check the cache
{ Group fromCache = (Group) myContext.fromCache(Group.class,
groups.add(new Group(myContext, r)); r.getIntColumn("eperson_group_id"));
if (fromCache != null)
{
groups.add(fromCache);
}
else
{
groups.add(new Group(myContext, r));
}
} }
} }
finally
tri.close(); {
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
} }
catch (Exception e) catch (Exception e)
@@ -449,16 +463,23 @@ public class Group extends DSpaceObject
Set<Integer> groupIDs = new HashSet<Integer>(); Set<Integer> groupIDs = new HashSet<Integer>();
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
{
TableRow row = tri.next();
int childID = row.getIntColumn("eperson_group_id"); int childID = row.getIntColumn("eperson_group_id");
groupIDs.add(new Integer(childID)); groupIDs.add(new Integer(childID));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
// Also need to get all "Special Groups" user is a member of! // Also need to get all "Special Groups" user is a member of!
// Otherwise, you're ignoring the user's membership to these groups! // Otherwise, you're ignoring the user's membership to these groups!
@@ -501,16 +522,23 @@ public class Group extends DSpaceObject
"SELECT * FROM group2groupcache WHERE " + groupQuery, "SELECT * FROM group2groupcache WHERE " + groupQuery,
parameters); parameters);
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
{
TableRow row = tri.next();
int parentID = row.getIntColumn("parent_id"); int parentID = row.getIntColumn("parent_id");
groupIDs.add(new Integer(parentID)); groupIDs.add(new Integer(parentID));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
return groupIDs; return groupIDs;
} }
@@ -570,16 +598,23 @@ public class Group extends DSpaceObject
Set<Integer> groupIDs = new HashSet<Integer>(); Set<Integer> groupIDs = new HashSet<Integer>();
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
{
TableRow row = tri.next();
int childID = row.getIntColumn("child_id"); int childID = row.getIntColumn("child_id");
groupIDs.add(new Integer(childID)); groupIDs.add(new Integer(childID));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
// now we have all the groups (including this one) // now we have all the groups (including this one)
// it is time to find all the EPeople who belong to those groups // it is time to find all the EPeople who belong to those groups
@@ -612,16 +647,23 @@ public class Group extends DSpaceObject
"SELECT * FROM epersongroup2eperson WHERE " + epersonQuery, "SELECT * FROM epersongroup2eperson WHERE " + epersonQuery,
parameters); parameters);
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
{
TableRow row = tri.next();
int epersonID = row.getIntColumn("eperson_id"); int epersonID = row.getIntColumn("eperson_id");
epeopleIDs.add(new Integer(epersonID)); epeopleIDs.add(new Integer(epersonID));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
return epeopleIDs; return epeopleIDs;
} }
@@ -1135,32 +1177,39 @@ public class Group extends DSpaceObject
Map<Integer,Set<Integer>> parents = new HashMap<Integer,Set<Integer>>(); Map<Integer,Set<Integer>> parents = new HashMap<Integer,Set<Integer>>();
while (tri.hasNext()) try
{ {
TableRow row = (TableRow) tri.next(); while (tri.hasNext())
Integer parentID = new Integer(row.getIntColumn("parent_id"));
Integer childID = new Integer(row.getIntColumn("child_id"));
// if parent doesn't have an entry, create one
if (!parents.containsKey(parentID))
{ {
Set<Integer> children = new HashSet<Integer>(); TableRow row = (TableRow) tri.next();
// add child id to the list Integer parentID = new Integer(row.getIntColumn("parent_id"));
children.add(childID); Integer childID = new Integer(row.getIntColumn("child_id"));
parents.put(parentID, children);
} // if parent doesn't have an entry, create one
else if (!parents.containsKey(parentID))
{ {
// parent has an entry, now add the child to the parent's record Set<Integer> children = new HashSet<Integer>();
// of children
Set<Integer> children = parents.get(parentID); // add child id to the list
children.add(childID); children.add(childID);
parents.put(parentID, children);
}
else
{
// parent has an entry, now add the child to the parent's record
// of children
Set<Integer> children = parents.get(parentID);
children.add(childID);
}
} }
} }
finally
tri.close(); {
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
// now parents is a hash of all of the IDs of groups that are parents // now parents is a hash of all of the IDs of groups that are parents
// and each hash entry is a hash of all of the IDs of children of those // and each hash entry is a hash of all of the IDs of children of those

View File
" AND collection_id= ? ", " AND collection_id= ? ",
eperson.getID(),collection.getID()); eperson.getID(),collection.getID());
if (!r.hasNext()) try
{ {
// Not subscribed, so add them if (!r.hasNext())
TableRow row = DatabaseManager.create(context, "subscription"); {
row.setColumn("eperson_id", eperson.getID()); // Not subscribed, so add them
row.setColumn("collection_id", collection.getID()); TableRow row = DatabaseManager.create(context, "subscription");
DatabaseManager.update(context, row); row.setColumn("eperson_id", eperson.getID());
row.setColumn("collection_id", collection.getID());
DatabaseManager.update(context, row);
log.info(LogManager.getHeader(context, "subscribe", log.info(LogManager.getHeader(context, "subscribe",
"eperson_id=" + eperson.getID() + ",collection_id=" "eperson_id=" + eperson.getID() + ",collection_id="
+ collection.getID())); + collection.getID()));
}
}
finally
{
// close the TableRowIterator to free up resources
if (r != null)
r.close();
} }
r.close();
} }
else else
{ {
@@ -198,15 +205,22 @@ public class Subscribe
List collections = new ArrayList(); List collections = new ArrayList();
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
{
TableRow row = tri.next();
collections.add(Collection.find(context, row collections.add(Collection.find(context, row
.getIntColumn("collection_id"))); .getIntColumn("collection_id")));
}
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
} }
tri.close();
Collection[] collArray = new Collection[collections.size()]; Collection[] collArray = new Collection[collections.size()];
@@ -231,11 +245,17 @@ public class Subscribe
"SELECT * FROM subscription WHERE eperson_id= ? " + "SELECT * FROM subscription WHERE eperson_id= ? " +
"AND collection_id= ? ", "AND collection_id= ? ",
eperson.getID(),collection.getID()); eperson.getID(),collection.getID());
boolean result = tri.hasNext(); try
tri.close(); {
return tri.hasNext();
return result; }
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
} }
/** /**
@@ -265,42 +285,49 @@ public class Subscribe
EPerson currentEPerson = null; EPerson currentEPerson = null;
List collections = null; // List of Collections List collections = null; // List of Collections
// Go through the list collating subscriptions for each e-person try
while (tri.hasNext())
{ {
TableRow row = tri.next(); // Go through the list collating subscriptions for each e-person
while (tri.hasNext())
// Does this row relate to the same e-person as the last?
if ((currentEPerson == null)
|| (row.getIntColumn("eperson_id") != currentEPerson
.getID()))
{ {
// New e-person. Send mail for previous e-person TableRow row = tri.next();
if (currentEPerson != null)
{
try // Does this row relate to the same e-person as the last?
if ((currentEPerson == null)
|| (row.getIntColumn("eperson_id") != currentEPerson
.getID()))
{
// New e-person. Send mail for previous e-person
if (currentEPerson != null)
{ {
sendEmail(context, currentEPerson, collections, test);
} try
catch (MessagingException me) {
{ sendEmail(context, currentEPerson, collections, test);
log.error("Failed to send subscription to eperson_id=" }
+ currentEPerson.getID()); catch (MessagingException me)
log.error(me); {
log.error("Failed to send subscription to eperson_id="
+ currentEPerson.getID());
log.error(me);
}
} }
currentEPerson = EPerson.find(context, row
.getIntColumn("eperson_id"));
collections = new ArrayList();
} }
currentEPerson = EPerson.find(context, row collections.add(Collection.find(context, row
.getIntColumn("eperson_id")); .getIntColumn("collection_id")));
collections = new ArrayList();
} }
collections.add(Collection.find(context, row
.getIntColumn("collection_id")));
} }
finally
tri.close(); {
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
// Process the last person // Process the last person
if (currentEPerson != null) if (currentEPerson != null)

View File
TableRowIterator tri = DatabaseManager.queryTable(context, TableRowIterator tri = DatabaseManager.queryTable(context,
"epersongroup2workspaceitem", "epersongroup2workspaceitem",
query,groupID,wsItemID); query,groupID,wsItemID);
boolean result = tri.hasNext(); try
tri.close(); {
return result; return tri.hasNext();
}
finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
} }
/** /**
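Subscribe and Supervisor both turn the existence check into a return from inside the try block, with finally guaranteeing the close runs after hasNext() is evaluated but before the value is handed back. A compact sketch (package names assumed from the DSpace source layout, query illustrative):

    import java.sql.SQLException;

    import org.dspace.core.Context;
    import org.dspace.storage.rdbms.DatabaseManager;
    import org.dspace.storage.rdbms.TableRowIterator;

    public class ExistenceCheck
    {
        /** The finally clause closes the iterator after hasNext() is computed, before returning. */
        public static boolean exists(Context context, int epersonID, int collectionID)
            throws SQLException
        {
            TableRowIterator tri = DatabaseManager.queryTable(context, "subscription",
                "SELECT * FROM subscription WHERE eperson_id = ? AND collection_id = ?",
                epersonID, collectionID);
            try
            {
                return tri.hasNext();
            }
            finally
            {
                if (tri != null)
                {
                    tri.close();
                }
            }
        }
    }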
View File
@@ -327,13 +327,20 @@ public class HandleManager
TableRowIterator iterator = DatabaseManager.queryTable(context, null, sql, prefix+"%"); TableRowIterator iterator = DatabaseManager.queryTable(context, null, sql, prefix+"%");
List results = new ArrayList(); List results = new ArrayList();
while (iterator.hasNext()) try
{ {
TableRow row = (TableRow) iterator.next(); while (iterator.hasNext())
results.add(row.getStringColumn("handle")); {
TableRow row = (TableRow) iterator.next();
results.add(row.getStringColumn("handle"));
}
}
finally
{
// close the TableRowIterator to free up resources
if (iterator != null)
iterator.close();
} }
iterator.close();
return results; return results;
} }
View File
@@ -228,43 +228,51 @@ public class Harvest
List infoObjects = new LinkedList(); List infoObjects = new LinkedList();
int index = 0; int index = 0;
// Process results of query into HarvestedItemInfo objects try
while (tri.hasNext())
{ {
TableRow row = tri.next(); // Process results of query into HarvestedItemInfo objects
while (tri.hasNext())
/*
* This conditional ensures that we only process items within any
* constraints specified by 'offset' and 'limit' parameters.
*/
if ((index >= offset)
&& ((limit == 0) || (index < (offset + limit))))
{ {
HarvestedItemInfo itemInfo = new HarvestedItemInfo(); TableRow row = tri.next();
itemInfo.context = context;
itemInfo.handle = row.getStringColumn("handle");
itemInfo.itemID = row.getIntColumn("resource_id");
itemInfo.datestamp = row.getDateColumn("last_modified");
itemInfo.withdrawn = row.getBooleanColumn("withdrawn");
if (collections) /*
* This conditional ensures that we only process items within any
* constraints specified by 'offset' and 'limit' parameters.
*/
if ((index >= offset)
&& ((limit == 0) || (index < (offset + limit))))
{ {
fillCollections(context, itemInfo); HarvestedItemInfo itemInfo = new HarvestedItemInfo();
itemInfo.context = context;
itemInfo.handle = row.getStringColumn("handle");
itemInfo.itemID = row.getIntColumn("resource_id");
itemInfo.datestamp = row.getDateColumn("last_modified");
itemInfo.withdrawn = row.getBooleanColumn("withdrawn");
if (collections)
{
fillCollections(context, itemInfo);
}
if (items)
{
// Get the item
itemInfo.item = Item.find(context, itemInfo.itemID);
}
infoObjects.add(itemInfo);
} }
if (items) index++;
{
// Get the item
itemInfo.item = Item.find(context, itemInfo.itemID);
}
infoObjects.add(itemInfo);
} }
index++;
} }
tri.close(); finally
{
// close the TableRowIterator to free up resources
if (tri != null)
tri.close();
}
return infoObjects; return infoObjects;
} }
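The Harvest loop keeps its offset/limit windowing while gaining the try/finally guard; the windowing condition on its own, applied to a plain list (limit == 0 meaning unbounded, as in the hunk):

    import java.util.ArrayList;
    import java.util.List;

    public class Windowing
    {
        /** Keep entries whose position falls in [offset, offset + limit); limit == 0 means no upper bound. */
        public static <T> List<T> window(List<T> all, int offset, int limit)
        {
            List<T> selected = new ArrayList<T>();
            int index = 0;
            for (T item : all)
            {
                if ((index >= offset) && ((limit == 0) || (index < (offset + limit))))
                {
                    selected.add(item);
                }
                index++;
            }
            return selected;
        }
    }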
View File
@@ -107,24 +107,24 @@ public class OrderFormat
{ {
return delegate.makeSortString(value, language); return delegate.makeSortString(value, language);
} }
}
// No delegates found, so apply defaults
// No delegates found, so apply defaults if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null)
if (type.equalsIgnoreCase(OrderFormat.AUTHOR) && authorDelegate != null) {
{ return authorDelegate.makeSortString(value, language);
return authorDelegate.makeSortString(value, language); }
if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null)
{
return titleDelegate.makeSortString(value, language);
}
if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null)
{
return textDelegate.makeSortString(value, language);
}
} }
if (type.equalsIgnoreCase(OrderFormat.TITLE) && titleDelegate != null)
{
return titleDelegate.makeSortString(value, language);
}
if (type.equalsIgnoreCase(OrderFormat.TEXT) && textDelegate != null)
{
return textDelegate.makeSortString(value, language);
}
return value; return value;
} }
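After this hunk the resolution order in makeSortString is: a delegate configured for the value's type, then the built-in author/title/text handlers, then the raw value. In isolation, with the delegate lookup stubbed and the OrderFormat constants inlined as plain strings:

    import java.util.Map;

    public class SortKeyResolution
    {
        /** Stand-in for the delegate interface OrderFormat dispatches to. */
        public interface Delegate
        {
            String makeSortString(String value, String language);
        }

        public static String makeSortString(Map<String, Delegate> configured,
                Delegate authorDelegate, Delegate titleDelegate, Delegate textDelegate,
                String type, String value, String language)
        {
            // 1. a delegate configured for this exact type wins
            Delegate delegate = configured.get(type);
            if (delegate != null)
            {
                return delegate.makeSortString(value, language);
            }
            // 2. otherwise fall back to the built-in author/title/text handling
            if ("author".equalsIgnoreCase(type) && authorDelegate != null)
            {
                return authorDelegate.makeSortString(value, language);
            }
            if ("title".equalsIgnoreCase(type) && titleDelegate != null)
            {
                return titleDelegate.makeSortString(value, language);
            }
            if ("text".equalsIgnoreCase(type) && textDelegate != null)
            {
                return textDelegate.makeSortString(value, language);
            }
            // 3. no handler applies: sort on the raw value
            return value;
        }
    }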
View File
@@ -304,16 +304,21 @@ public class SortOption
{ {
if (SortOption.sortOptionsMap != null) if (SortOption.sortOptionsMap != null)
return SortOption.sortOptionsMap; return SortOption.sortOptionsMap;
SortOption.sortOptionsMap = new HashMap<Integer, SortOption>(); synchronized (SortOption.class)
synchronized (SortOption.sortOptionsMap)
{ {
for (SortOption so : SortOption.getSortOptions()) if (SortOption.sortOptionsMap == null)
{ {
SortOption.sortOptionsMap.put(new Integer(so.getNumber()), so); Map<Integer, SortOption> newSortOptionsMap = new HashMap<Integer, SortOption>();
for (SortOption so : SortOption.getSortOptions())
{
newSortOptionsMap.put(new Integer(so.getNumber()), so);
}
SortOption.sortOptionsMap = newSortOptionsMap;
} }
} }
return SortOption.sortOptionsMap; return SortOption.sortOptionsMap;
} }
@@ -327,17 +332,22 @@ public class SortOption
if (SortOption.sortOptionsSet != null) if (SortOption.sortOptionsSet != null)
return SortOption.sortOptionsSet; return SortOption.sortOptionsSet;
SortOption.sortOptionsSet = new HashSet<SortOption>(); synchronized (SortOption.class)
synchronized (SortOption.sortOptionsSet)
{ {
int idx = 1; if (SortOption.sortOptionsSet == null)
String option;
while ( ((option = ConfigurationManager.getProperty("webui.itemlist.sort-option." + idx))) != null)
{ {
SortOption so = new SortOption(idx, option); Set<SortOption> newSortOptionsSet = new HashSet<SortOption>();
SortOption.sortOptionsSet.add(so); int idx = 1;
idx++; String option;
while ( ((option = ConfigurationManager.getProperty("webui.itemlist.sort-option." + idx))) != null)
{
SortOption so = new SortOption(idx, option);
newSortOptionsSet.add(so);
idx++;
}
SortOption.sortOptionsSet = newSortOptionsSet;
} }
} }
View File
@@ -352,9 +352,13 @@ public class BitstreamStorageManager
bitstream.setColumn("size_bytes", file.length()); bitstream.setColumn("size_bytes", file.length());
bitstream.setColumn("checksum", Utils.toHex(dis.getMessageDigest() if (dis != null)
.digest())); {
bitstream.setColumn("checksum_algorithm", "MD5"); bitstream.setColumn("checksum", Utils.toHex(dis.getMessageDigest()
.digest()));
bitstream.setColumn("checksum_algorithm", "MD5");
}
bitstream.setColumn("deleted", false); bitstream.setColumn("deleted", false);
DatabaseManager.update(context, bitstream); DatabaseManager.update(context, bitstream);
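The BitstreamStorageManager hunk records a checksum only when the DigestInputStream actually exists; presumably it is null when MD5 could not be obtained earlier. A reduced sketch of that guard (the wrapping step is an assumption, not code from this commit):

    import java.io.InputStream;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class ChecksumGuard
    {
        /** dis may legitimately be null if the digest could not be created when the stream was wrapped. */
        public static byte[] digestOrNull(DigestInputStream dis)
        {
            if (dis != null)
            {
                return dis.getMessageDigest().digest();
            }
            return null;
        }

        public static DigestInputStream tryWrap(InputStream in)
        {
            try
            {
                return new DigestInputStream(in, MessageDigest.getInstance("MD5"));
            }
            catch (NoSuchAlgorithmException nsae)
            {
                return null;   // extremely unlikely for MD5, but this is why callers check for null
            }
        }
    }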
View File
@@ -276,10 +276,19 @@ public class DatabaseManager
public static TableRow querySingle(Context context, String query, public static TableRow querySingle(Context context, String query,
Object... parameters) throws SQLException Object... parameters) throws SQLException
{ {
TableRowIterator iterator = query(context, query, parameters); TableRow retRow = null;
TableRowIterator iterator = null;
try
{
iterator = query(context, query, parameters);
retRow = (!iterator.hasNext()) ? null : iterator.next();
}
finally
{
if (iterator != null)
iterator.close();
}
TableRow retRow = (!iterator.hasNext()) ? null : iterator.next();
iterator.close();
return (retRow); return (retRow);
} }
@@ -304,10 +313,18 @@ public class DatabaseManager
public static TableRow querySingleTable(Context context, String table, public static TableRow querySingleTable(Context context, String table,
String query, Object... parameters) throws SQLException String query, Object... parameters) throws SQLException
{ {
TableRow retRow = null;
TableRowIterator iterator = queryTable(context, canonicalize(table), query, parameters); TableRowIterator iterator = queryTable(context, canonicalize(table), query, parameters);
TableRow retRow = (!iterator.hasNext()) ? null : iterator.next(); try
iterator.close(); {
retRow = (!iterator.hasNext()) ? null : iterator.next();
}
finally
{
if (iterator != null)
iterator.close();
}
return (retRow); return (retRow);
} }
@@ -564,26 +581,44 @@ public class DatabaseManager
public static void insert(Context context, TableRow row) public static void insert(Context context, TableRow row)
throws SQLException throws SQLException
{ {
int newID = -1;
String table = canonicalize(row.getTable()); String table = canonicalize(row.getTable());
Statement statement = null;
ResultSet rs = null;
// Get an ID (primary key) for this row by using the "getnextid" try
// SQL function in Postgres, or directly with sequences in Oracle
String myQuery = "SELECT getnextid('" + table + "') AS result";
if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
{ {
myQuery = "SELECT " + table + "_seq" + ".nextval FROM dual"; // Get an ID (primary key) for this row by using the "getnextid"
// SQL function in Postgres, or directly with sequences in Oracle
String myQuery = "SELECT getnextid('" + table + "') AS result";
if ("oracle".equals(ConfigurationManager.getProperty("db.name")))
{
myQuery = "SELECT " + table + "_seq" + ".nextval FROM dual";
}
statement = context.getDBConnection().createStatement();
rs = statement.executeQuery(myQuery);
rs.next();
newID = rs.getInt(1);
}
finally
{
if (rs != null)
{
try { rs.close(); } catch (SQLException sqle) { }
}
if (statement != null)
{
try { statement.close(); } catch (SQLException sqle) { }
}
} }
Statement statement = context.getDBConnection().createStatement(); if (newID < 0)
ResultSet rs = statement.executeQuery(myQuery); throw new SQLException("Unable to retrieve sequence ID");
rs.next();
int newID = rs.getInt(1);
rs.close();
statement.close();
// Set the ID in the table row object // Set the ID in the table row object
row.setColumn(getPrimaryKeyColumn(table), newID); row.setColumn(getPrimaryKeyColumn(table), newID);
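insert() now primes newID with a sentinel, performs the sequence lookup inside try/finally, and refuses to continue if no value was obtained. A skeleton of that flow (PostgreSQL getnextid query as in the hunk):

    import java.sql.Connection;
    import java.sql.ResultSet;
    import java.sql.SQLException;
    import java.sql.Statement;

    public class SequenceFetch
    {
        public static int nextId(Connection con, String table) throws SQLException
        {
            int newID = -1;                       // sentinel: "no value obtained yet"
            Statement statement = null;
            ResultSet rs = null;
            try
            {
                statement = con.createStatement();
                rs = statement.executeQuery("SELECT getnextid('" + table + "') AS result");
                rs.next();
                newID = rs.getInt(1);
            }
            finally
            {
                if (rs != null)
                {
                    try { rs.close(); } catch (SQLException sqle) { }
                }
                if (statement != null)
                {
                    try { statement.close(); } catch (SQLException sqle) { }
                }
            }
            if (newID < 0)
            {
                throw new SQLException("Unable to retrieve sequence ID");
            }
            return newID;
        }
    }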
@@ -1354,7 +1389,9 @@ public class DatabaseManager
private static Map retrieveColumnInfo(String table) throws SQLException private static Map retrieveColumnInfo(String table) throws SQLException
{ {
Connection connection = null; Connection connection = null;
ResultSet pkcolumns = null;
ResultSet columns = null;
try try
{ {
String schema = ConfigurationManager.getProperty("db.schema"); String schema = ConfigurationManager.getProperty("db.schema");
@@ -1367,13 +1404,13 @@ public class DatabaseManager
String tname = (table.length() >= max) ? table String tname = (table.length() >= max) ? table
.substring(0, max - 1) : table; .substring(0, max - 1) : table;
ResultSet pkcolumns = metadata.getPrimaryKeys(null, schema, tname); pkcolumns = metadata.getPrimaryKeys(null, schema, tname);
Set pks = new HashSet(); Set pks = new HashSet();
while (pkcolumns.next()) while (pkcolumns.next())
pks.add(pkcolumns.getString(4)); pks.add(pkcolumns.getString(4));
ResultSet columns = metadata.getColumns(null, schema, tname, null); columns = metadata.getColumns(null, schema, tname, null);
while (columns.next()) while (columns.next())
{ {
@@ -1394,9 +1431,19 @@ public class DatabaseManager
} }
finally finally
{ {
if (pkcolumns != null)
{
try { pkcolumns.close(); } catch (SQLException sqle) { }
}
if (columns != null)
{
try { columns.close(); } catch (SQLException sqle) { }
}
if (connection != null) if (connection != null)
{ {
connection.close(); try { connection.close(); } catch (SQLException sqle) { }
} }
} }
} }
View File
@@ -108,7 +108,7 @@ public class TableRowIterator
/** /**
* Finalize -- this method is called when this object is GC-ed. * Finalize -- this method is called when this object is GC-ed.
*/ */
public void finalize() protected void finalize()
{ {
close(); close();
} }
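Narrowing finalize() from public to protected matches the visibility of Object.finalize() and keeps the safety net out of the public API. A sketch of the idiom (the super.finalize() call goes slightly beyond the hunk but is the conventional form):

    public class GuardedResource
    {
        private java.io.Closeable resource;

        public void close()
        {
            if (resource != null)
            {
                try { resource.close(); } catch (java.io.IOException ioe) { }
                resource = null;
            }
        }

        /** Safety net only; explicit close() remains the supported path. */
        protected void finalize() throws Throwable
        {
            try
            {
                close();
            }
            finally
            {
                super.finalize();
            }
        }
    }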
View File
@@ -171,15 +171,21 @@ public class WorkflowItem implements InProgressSubmission
TableRowIterator tri = DatabaseManager.queryTable(c, "workflowitem", TableRowIterator tri = DatabaseManager.queryTable(c, "workflowitem",
"SELECT * FROM workflowitem"); "SELECT * FROM workflowitem");
// make a list of workflow items try
while (tri.hasNext())
{ {
TableRow row = tri.next(); // make a list of workflow items
WorkflowItem wi = new WorkflowItem(c, row); while (tri.hasNext())
wfItems.add(wi); {
TableRow row = tri.next();
WorkflowItem wi = new WorkflowItem(c, row);
wfItems.add(wi);
}
}
finally
{
if (tri != null)
tri.close();
} }
tri.close();
WorkflowItem[] wfArray = new WorkflowItem[wfItems.size()]; WorkflowItem[] wfArray = new WorkflowItem[wfItems.size()];
wfArray = (WorkflowItem[]) wfItems.toArray(wfArray); wfArray = (WorkflowItem[]) wfItems.toArray(wfArray);
@@ -211,23 +217,29 @@ public class WorkflowItem implements InProgressSubmission
"ORDER BY workflowitem.workflow_id", "ORDER BY workflowitem.workflow_id",
ep.getID()); ep.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
// Check the cache
WorkflowItem wi = (WorkflowItem) context.fromCache(
WorkflowItem.class, row.getIntColumn("workflow_id"));
if (wi == null)
{ {
wi = new WorkflowItem(context, row); TableRow row = tri.next();
}
wfItems.add(wi); // Check the cache
WorkflowItem wi = (WorkflowItem) context.fromCache(
WorkflowItem.class, row.getIntColumn("workflow_id"));
if (wi == null)
{
wi = new WorkflowItem(context, row);
}
wfItems.add(wi);
}
}
finally
{
if (tri != null)
tri.close();
} }
tri.close();
WorkflowItem[] wfArray = new WorkflowItem[wfItems.size()]; WorkflowItem[] wfArray = new WorkflowItem[wfItems.size()];
wfArray = (WorkflowItem[]) wfItems.toArray(wfArray); wfArray = (WorkflowItem[]) wfItems.toArray(wfArray);
@@ -255,24 +267,30 @@ public class WorkflowItem implements InProgressSubmission
"workflowitem.collection_id= ? ", "workflowitem.collection_id= ? ",
c.getID()); c.getID());
while (tri.hasNext()) try
{ {
TableRow row = tri.next(); while (tri.hasNext())
// Check the cache
WorkflowItem wi = (WorkflowItem) context.fromCache(
WorkflowItem.class, row.getIntColumn("workflow_id"));
// not in cache? turn row into workflowitem
if (wi == null)
{ {
wi = new WorkflowItem(context, row); TableRow row = tri.next();
}
wsItems.add(wi); // Check the cache
WorkflowItem wi = (WorkflowItem) context.fromCache(
WorkflowItem.class, row.getIntColumn("workflow_id"));
// not in cache? turn row into workflowitem
if (wi == null)
{
wi = new WorkflowItem(context, row);
}
wsItems.add(wi);
}
}
finally
{
if (tri != null)
tri.close();
} }
tri.close();
WorkflowItem[] wsArray = new WorkflowItem[wsItems.size()]; WorkflowItem[] wsArray = new WorkflowItem[wsItems.size()];
wsArray = (WorkflowItem[]) wsItems.toArray(wsArray); wsArray = (WorkflowItem[]) wsItems.toArray(wsArray);
View File
@@ -237,12 +237,18 @@ public class WorkflowManager
TableRowIterator tri = DatabaseManager.queryTable(c, TableRowIterator tri = DatabaseManager.queryTable(c,
"workflowitem", myquery,e.getID()); "workflowitem", myquery,e.getID());
while (tri.hasNext()) try
{ {
mylist.add(new WorkflowItem(c, tri.next())); while (tri.hasNext())
{
mylist.add(new WorkflowItem(c, tri.next()));
}
}
finally
{
if (tri != null)
tri.close();
} }
tri.close();
return mylist; return mylist;
} }
@@ -265,12 +271,18 @@ public class WorkflowManager
TableRowIterator tri = DatabaseManager TableRowIterator tri = DatabaseManager
.queryTable(c, "workflowitem", myquery, e.getID()); .queryTable(c, "workflowitem", myquery, e.getID());
while (tri.hasNext()) try
{ {
mylist.add(new WorkflowItem(c, tri.next())); while (tri.hasNext())
{
mylist.add(new WorkflowItem(c, tri.next()));
}
}
finally
{
if (tri != null)
tri.close();
} }
tri.close();
return mylist; return mylist;
} }
View File
@@ -190,7 +190,7 @@ public class FeedServlet extends DSpaceServlet
//as long as this is not a site wide feed, //as long as this is not a site wide feed,
//attempt to retrieve the Collection or Community object //attempt to retrieve the Collection or Community object
if(!handle.equals(SITE_FEED_KEY)) if(handle != null && !handle.equals(SITE_FEED_KEY))
{ {
// Determine if handle is a valid reference // Determine if handle is a valid reference
dso = HandleManager.resolveToObject(context, handle); dso = HandleManager.resolveToObject(context, handle);
@@ -371,7 +371,7 @@ public class FeedServlet extends DSpaceServlet
: HandleManager.getCanonicalForm(dso.getHandle()); : HandleManager.getCanonicalForm(dso.getHandle());
// put in container-level data // put in container-level data
channel.setDescription(description.replaceAll("\\p{Cntrl}", "")); channel.setDescription(description == null ? "" : description.replaceAll("\\p{Cntrl}", ""));
channel.setLink(objectUrl); channel.setLink(objectUrl);
//build channel title by passing in type and title //build channel title by passing in type and title
String channelTitle = MessageFormat.format(labels.getString(clazz + ".feed.title"), String channelTitle = MessageFormat.format(labels.getString(clazz + ".feed.title"),
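The FeedServlet fix is two defensive null checks: skip the site-feed comparison when no handle was supplied, and treat a missing description as empty before the regex replace. Reduced to the guards themselves (SITE_FEED_KEY value illustrative):

    public class FeedGuards
    {
        private static final String SITE_FEED_KEY = "site";   // illustrative value, not from the commit

        public static boolean isObjectFeed(String handle)
        {
            // the old code called handle.equals(...) and could throw NullPointerException on a null handle
            return handle != null && !handle.equals(SITE_FEED_KEY);
        }

        public static String cleanDescription(String description)
        {
            // strip control characters, tolerating a null description
            return description == null ? "" : description.replaceAll("\\p{Cntrl}", "");
        }
    }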
View File
@@ -122,6 +122,7 @@ public class StatisticsServlet extends org.dspace.app.webui.servlet.DSpaceServle
         HttpServletRequest request, HttpServletResponse response)
         throws ServletException, IOException, SQLException, AuthorizeException
     {
+        StringBuffer report = new StringBuffer();
         String date = (String) request.getParameter("date");
         request.setAttribute("date", date);
@@ -135,123 +136,135 @@ public class StatisticsServlet extends org.dspace.app.webui.servlet.DSpaceServle
         FileInputStream fir = null;
         InputStreamReader ir = null;
         BufferedReader br = null;
-        List monthsList = new ArrayList();
-        Pattern monthly = Pattern.compile("report-([0-9][0-9][0-9][0-9]-[0-9]+)\\.html");
-        Pattern general = Pattern.compile("report-general-([0-9]+-[0-9]+-[0-9]+)\\.html");
-        // FIXME: this whole thing is horribly inflexible and needs serious
-        // work; but as a basic proof of concept will suffice
-        // if no date is passed then we want to get the most recent general
-        // report
-        if (date == null)
-        {
-            request.setAttribute("general", new Boolean(true));
-            SimpleDateFormat sdf = new SimpleDateFormat("yyyy'-'M'-'dd");
-            Date mostRecentDate = null;
-            for (int i = 0; i < reports.length; i++)
-            {
-                Matcher matchGeneral = general.matcher(reports[i].getName());
-                if (matchGeneral.matches())
-                {
-                    Date parsedDate = null;
-                    try
-                    {
-                        parsedDate = sdf.parse(matchGeneral.group(1).trim());
-                    }
-                    catch (ParseException e)
-                    {
-                        // FIXME: currently no error handling
-                    }
-                    if (mostRecentDate == null)
-                    {
-                        mostRecentDate = parsedDate;
-                        reportFile = reports[i];
-                    }
-                    if (parsedDate.compareTo(mostRecentDate) > 0)
-                    {
-                        mostRecentDate = parsedDate;
-                        reportFile = reports[i];
-                    }
-                }
-            }
-        }
-        // if a date is passed then we want to get the file for that month
-        if (date != null)
-        {
-            String desiredReport = "report-" + date + ".html";
-            for (int i = 0; i < reports.length; i++)
-            {
-                if (reports[i].getName().equals(desiredReport))
-                {
-                    reportFile = reports[i];
-                }
-            }
-        }
-        if (reportFile == null)
-        {
-            JSPManager.showJSP(request, response, "statistics/no-report.jsp");
-        }
-        // finally, build the list of report dates
-        SimpleDateFormat sdf = new SimpleDateFormat("yyyy'-'M");
-        for (int i = 0; i < reports.length; i++)
-        {
-            Matcher matchReport = monthly.matcher(reports[i].getName());
-            if (matchReport.matches())
-            {
-                Date parsedDate = null;
-                try
-                {
-                    parsedDate = sdf.parse(matchReport.group(1).trim());
-                }
-                catch (ParseException e)
-                {
-                    // FIXME: currently no error handling
-                }
-                monthsList.add(parsedDate);
-            }
-        }
-        Date[] months = new Date[monthsList.size()];
-        months = (Date[]) monthsList.toArray(months);
-        Arrays.sort(months);
-        request.setAttribute("months", months);
-        try
-        {
-            fir = new FileInputStream(reportFile.getPath());
-            ir = new InputStreamReader(fir, "UTF-8");
-            br = new BufferedReader(ir);
-        }
-        catch (IOException e)
-        {
-            // FIXME: no error handing yet
-            throw new RuntimeException(e.getMessage(),e);
-        }
-        // FIXME: there's got to be a better way of doing this
-        StringBuffer report = new StringBuffer();
-        String line = null;
-        while ((line = br.readLine()) != null)
-        {
-            report.append(line);
-        }
+        try
+        {
+            List monthsList = new ArrayList();
+            Pattern monthly = Pattern.compile("report-([0-9][0-9][0-9][0-9]-[0-9]+)\\.html");
+            Pattern general = Pattern.compile("report-general-([0-9]+-[0-9]+-[0-9]+)\\.html");
+            // FIXME: this whole thing is horribly inflexible and needs serious
+            // work; but as a basic proof of concept will suffice
+            // if no date is passed then we want to get the most recent general
+            // report
+            if (date == null)
+            {
+                request.setAttribute("general", new Boolean(true));
+                SimpleDateFormat sdf = new SimpleDateFormat("yyyy'-'M'-'dd");
+                Date mostRecentDate = null;
+                for (int i = 0; i < reports.length; i++)
+                {
+                    Matcher matchGeneral = general.matcher(reports[i].getName());
+                    if (matchGeneral.matches())
+                    {
+                        Date parsedDate = null;
+                        try
+                        {
+                            parsedDate = sdf.parse(matchGeneral.group(1).trim());
+                        }
+                        catch (ParseException e)
+                        {
+                            // FIXME: currently no error handling
+                        }
+                        if (mostRecentDate == null)
+                        {
+                            mostRecentDate = parsedDate;
+                            reportFile = reports[i];
+                        }
+                        if (parsedDate != null && parsedDate.compareTo(mostRecentDate) > 0)
+                        {
+                            mostRecentDate = parsedDate;
+                            reportFile = reports[i];
+                        }
+                    }
+                }
+            }
+            // if a date is passed then we want to get the file for that month
+            if (date != null)
+            {
+                String desiredReport = "report-" + date + ".html";
+                for (int i = 0; i < reports.length; i++)
+                {
+                    if (reports[i].getName().equals(desiredReport))
+                    {
+                        reportFile = reports[i];
+                    }
+                }
+            }
+            if (reportFile == null)
+            {
+                JSPManager.showJSP(request, response, "statistics/no-report.jsp");
+            }
+            // finally, build the list of report dates
+            SimpleDateFormat sdf = new SimpleDateFormat("yyyy'-'M");
+            for (int i = 0; i < reports.length; i++)
+            {
+                Matcher matchReport = monthly.matcher(reports[i].getName());
+                if (matchReport.matches())
+                {
+                    Date parsedDate = null;
+                    try
+                    {
+                        parsedDate = sdf.parse(matchReport.group(1).trim());
+                    }
+                    catch (ParseException e)
+                    {
+                        // FIXME: currently no error handling
+                    }
+                    monthsList.add(parsedDate);
+                }
+            }
+            Date[] months = new Date[monthsList.size()];
+            months = (Date[]) monthsList.toArray(months);
+            Arrays.sort(months);
+            request.setAttribute("months", months);
+            try
+            {
+                fir = new FileInputStream(reportFile.getPath());
+                ir = new InputStreamReader(fir, "UTF-8");
+                br = new BufferedReader(ir);
+            }
+            catch (IOException e)
+            {
+                // FIXME: no error handing yet
+                throw new RuntimeException(e.getMessage(),e);
+            }
+            // FIXME: there's got to be a better way of doing this
+            String line = null;
+            while ((line = br.readLine()) != null)
+            {
+                report.append(line);
+            }
+        }
+        finally
+        {
+            if (br != null)
+                try { br.close(); } catch (IOException ioe) { }
+            if (ir != null)
+                try { ir.close(); } catch (IOException ioe) { }
+            if (fir != null)
+                try { fir.close(); } catch (IOException ioe) { }
+        }
         // set the report to be displayed
         request.setAttribute("report", report.toString());
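
The StatisticsServlet change moves the report buffer declaration ahead of the new outer try block and closes the reader chain in finally, innermost reader first, so the file handle is released even when report selection or reading fails. A minimal sketch of the same read-then-always-close pattern on its own; the class name and path argument are illustrative:

    import java.io.BufferedReader;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class ReportReaderSketch
    {
        // Reads a UTF-8 report file into a StringBuffer; the buffer is declared
        // outside the try so it can be used afterwards, and each stream in the
        // chain is closed in finally, innermost reader first.
        public static String readReport(String path) throws IOException
        {
            StringBuffer report = new StringBuffer();
            FileInputStream fis = null;
            InputStreamReader isr = null;
            BufferedReader br = null;
            try
            {
                fis = new FileInputStream(path);
                isr = new InputStreamReader(fis, "UTF-8");
                br = new BufferedReader(isr);
                String line = null;
                while ((line = br.readLine()) != null)
                {
                    report.append(line);
                }
            }
            finally
            {
                if (br != null)
                    try { br.close(); } catch (IOException e) { }
                if (isr != null)
                    try { isr.close(); } catch (IOException e) { }
                if (fis != null)
                    try { fis.close(); } catch (IOException e) { }
            }
            return report.toString();
        }
    }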

View File

@@ -93,7 +93,7 @@ public class SuggestServlet extends DSpaceServlet
         if (item != null)
         {
             DCValue[] titleDC = item.getDC("title", null, Item.ANY);
-            if (titleDC != null || titleDC.length > 0)
+            if (titleDC != null && titleDC.length > 0)
             {
                 title = titleDC[0].value;
             }
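
The SuggestServlet condition previously used ||, which still evaluates titleDC.length when the array is null and throws a NullPointerException; && skips the length test in that case. A tiny sketch of the corrected short-circuit check, with illustrative names:

    public class ShortCircuitSketch
    {
        // With ||, a null array still reaches values.length and throws a
        // NullPointerException; with && the length test is skipped when the
        // array is null.
        static String firstOrDefault(String[] values, String fallback)
        {
            if (values != null && values.length > 0)
            {
                return values[0];
            }
            return fallback;
        }

        public static void main(String[] args)
        {
            System.out.println(firstOrDefault(null, "untitled"));                       // untitled
            System.out.println(firstOrDefault(new String[] { "A title" }, "untitled")); // A title
        }
    }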

View File

@@ -231,8 +231,21 @@ public class DIDLCrosswalk extends Crosswalk
             byte[] buffer = new byte[intSize];
             Context contextl= new Context();
-            BufferedInputStream bis=new BufferedInputStream(BitstreamStorageManager.retrieve(contextl,bitstreams[k].getID()));
-            int size=bis.read(buffer);
+            InputStream is = BitstreamStorageManager.retrieve(contextl,bitstreams[k].getID());
+            BufferedInputStream bis = new BufferedInputStream(is);
+            try
+            {
+                int size=bis.read(buffer);
+            }
+            finally
+            {
+                if (bis != null)
+                    try { bis.close(); } catch (IOException ioe) { }
+                if (is != null)
+                    try { is.close(); } catch (IOException ioe) { }
+            }
             contextl.complete();
             String encoding = new String(Base64.encodeBase64(buffer), "ASCII");
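
The DIDLCrosswalk change keeps a separate reference to the stream returned by BitstreamStorageManager.retrieve() so both it and its BufferedInputStream wrapper can be closed in finally; closing the wrapper alone would normally close the wrapped stream too, but the patch closes both defensively. A minimal sketch of the same idiom against a plain file; unlike the patch, it also loops because a single read() may return fewer bytes than requested. The class and method names are illustrative:

    import java.io.BufferedInputStream;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    public class StreamCloseSketch
    {
        // Fills buffer from a file, keeping a handle on both the raw stream and
        // its BufferedInputStream wrapper so both can be closed in finally.
        public static int readInto(String path, byte[] buffer) throws IOException
        {
            InputStream is = null;
            BufferedInputStream bis = null;
            try
            {
                is = new FileInputStream(path);
                bis = new BufferedInputStream(is);
                int total = 0;
                while (total < buffer.length)
                {
                    int n = bis.read(buffer, total, buffer.length - total);
                    if (n < 0)
                        break;
                    total += n;
                }
                return total;
            }
            finally
            {
                if (bis != null)
                    try { bis.close(); } catch (IOException e) { }
                if (is != null)
                    try { is.close(); } catch (IOException e) { }
            }
        }
    }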