Mirror of https://github.com/DSpace/DSpace.git (synced 2025-10-07 10:04:21 +00:00)

Merge pull request #600 from tdonohue/DS-2080

DS-2080: Clean up bad/broken tests and upgrade to the latest JUnit and JMockit; also fixes DS-2086.
@@ -71,6 +71,23 @@
 </execution>
 </executions>
 </plugin>
+<!-- Verify OS license headers for all source code files -->
+<plugin>
+<groupId>com.mycila</groupId>
+<artifactId>license-maven-plugin</artifactId>
+<configuration>
+<excludes>
+<exclude>**/src/test/resources/**</exclude>
+<exclude>**/src/test/data/**</exclude>
+<exclude>**/.gitignore</exclude>
+<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
+</excludes>
+</configuration>
+</plugin>
+<!-- Unit/Integration Testing setup: This plugin unzips the
+'testEnvironment.zip' file (created by dspace-parent POM), into
+the 'target/testing/' folder, to essentially create a test
+install of DSpace, against which Tests can be run. -->
 <plugin>
 <artifactId>maven-dependency-plugin</artifactId>
 <version>2.8</version>

@@ -96,27 +113,15 @@
 </execution>
 </executions>
 </plugin>
-<plugin>
-<groupId>com.mycila</groupId>
-<artifactId>license-maven-plugin</artifactId>
-<configuration>
-<excludes>
-<exclude>**/src/test/resources/**</exclude>
-<exclude>**/src/test/data/**</exclude>
-<exclude>**/.gitignore</exclude>
-<exclude>src/test/data/dspaceFolder/config/spiders/**</exclude>
-</excludes>
-</configuration>
-</plugin>
 <!-- This plugin allows us to run a Groovy script in our Maven POM
 (see: http://gmaven.codehaus.org/Executing+Groovy+Code )
 We are generating a OS-agnostic version (agnostic.build.dir) of
 the ${project.build.directory} property (full path of target dir).
-This is necessary so that the FileWeaver & Surefire plugins can
-use it to initialize the Unit Test Framework's dspace.cfg file.
+This is needed by the FileWeaver & Surefire plugins (see below)
+to initialize the Unit Test environment's dspace.cfg file.
 Otherwise, the Unit Test Framework will not work on Windows OS.
 This Groovy code was mostly borrowed from:
 http://stackoverflow.com/questions/3872355/how-to-convert-file-separator-in-maven
 -->
 <plugin>
 <groupId>org.codehaus.gmaven</groupId>

@@ -139,7 +144,9 @@
 </executions>
 </plugin>
 <!-- FileWeaver plugin is in charge of initializing & "weaving" together
-the dspace.cfg file to be used by the Unit Testing framework -->
+the dspace.cfg file to be used by the Unit Testing environment.
+It weaves two files, the default 'dspace.cfg' and 'dspace.cfg.more',
+both of which are included in the testEnvironment.zip. -->
 <plugin>
 <groupId>edu.iu.ul.maven.plugins</groupId>
 <artifactId>fileweaver</artifactId>

@@ -175,8 +182,10 @@
 </plugin>
 
 <!--
-The ant plugin below ensures that the dspace "woven" configuration file ends up in the dspace directory
-The dspace service manager needs this "woven" configuration file when it starts
+The ant plugin below ensures that the final "woven" dspace.cfg
+ends up in the /target/testing/dspace/ directory. This becomes
+our final dspace.cfg for the Unit Testing environment. The dspace
+service manager needs this "woven" configuration file when it starts.
 -->
 <plugin>
 <artifactId>maven-antrun-plugin</artifactId>

@@ -195,14 +204,17 @@
 </executions>
 </plugin>
 
+<!-- Run Unit/Integration Testing! This plugin just kicks off the tests (when enabled). -->
 <plugin>
 <artifactId>maven-surefire-plugin</artifactId>
 <configuration>
 <systemPropertyVariables>
-<dspace.dir>${agnostic.build.dir}/testing/dspace</dspace.dir>
-<dspace.dir.static>${basedir}/src/test/data/dspaceFolder</dspace.dir.static>
+<!-- Specify the dspace.cfg file to use for test environment -->
 <dspace.configuration>${agnostic.build.dir}/testing/dspace/config/dspace.cfg</dspace.configuration>
-<db.schema.path>${agnostic.build.dir}/testing/dspace/etc/h2/database_schema.sql</db.schema.path>
+<!-- This config tells AbstractUnitTest which database schema to load. We use H2 database,
+running in Oracle Mode, for Unit Tests. So, we'll load the Oracle schema. -->
+<db.schema.path>${agnostic.build.dir}/testing/dspace/etc/oracle/database_schema.sql</db.schema.path>
+<!-- Turn off any DSpace logging -->
 <dspace.log.init.disable>true</dspace.log.init.disable>
 </systemPropertyVariables>
 </configuration>

@@ -409,7 +421,7 @@
 <artifactId>dspace-services</artifactId>
 </dependency>
 <dependency> <!-- Keep jmockit before junit -->
-<groupId>com.googlecode.jmockit</groupId>
+<groupId>org.jmockit</groupId>
 <artifactId>jmockit</artifactId>
 <scope>test</scope>
 </dependency>
@@ -994,10 +994,15 @@ public class Community extends DSpaceObject
 }
 
 /**
-* Remove a collection. Any items then orphaned are deleted.
+* Remove a collection. If it only belongs to one parent community,
+* then it is permanently deleted. If it has more than one parent community,
+* it is simply unmapped from the current community.
 *
 * @param c
 * collection to remove
+* @throws SQLException
+* @throws AuthorizeException
+* @throws IOException
 */
 public void removeCollection(Collection c) throws SQLException,
 AuthorizeException, IOException

@@ -1005,149 +1010,185 @@ public class Community extends DSpaceObject
 // Check authorisation
 AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
 
-// will be the collection an orphan?
-TableRow trow = DatabaseManager.querySingle(ourContext,
-"SELECT COUNT(DISTINCT community_id) AS num FROM community2collection WHERE collection_id= ? ",
-c.getID());
-DatabaseManager.setConstraintDeferred(ourContext, "comm2coll_collection_fk");
-if (trow.getLongColumn("num") == 1)
+// Do the removal in a try/catch, so that we can rollback on error
+try
 {
-// Orphan; delete it
-c.delete();
-}
+// Capture ID & Handle of Collection we are removing, so we can trigger events later
+int removedId = c.getID();
+String removedHandle = c.getHandle();
 
-log.info(LogManager.getHeader(ourContext, "remove_collection",
-"community_id=" + getID() + ",collection_id=" + c.getID()));
-
-// Remove any mappings
-DatabaseManager.updateQuery(ourContext,
-"DELETE FROM community2collection WHERE community_id= ? "+
-"AND collection_id= ? ", getID(), c.getID());
-
-DatabaseManager.setConstraintImmediate(ourContext, "comm2coll_collection_fk");
+// How many parent(s) does this collection have?
+TableRow trow = DatabaseManager.querySingle(ourContext,
+"SELECT COUNT(DISTINCT community_id) AS num FROM community2collection WHERE collection_id= ? ",
+c.getID());
+long numParents = trow.getLongColumn("num");
+
+// Remove the parent/child mapping with this collection
+// We do this before deletion, so that the deletion doesn't throw database integrity violations
+DatabaseManager.updateQuery(ourContext,
+"DELETE FROM community2collection WHERE community_id= ? "+
+"AND collection_id= ? ", getID(), c.getID());
+
+// As long as this Collection only had one parent, delete it
+// NOTE: if it had multiple parents, we will keep it around,
+// and just remove that single parent/child mapping
+if (numParents == 1)
+{
+c.delete();
+}
 
-ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COLLECTION, c.getID(), c.getHandle()));
+// log the removal & trigger any associated event(s)
+log.info(LogManager.getHeader(ourContext, "remove_collection",
+"community_id=" + getID() + ",collection_id=" + removedId));
+
+ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COLLECTION, removedId, removedHandle));
+}
+catch(SQLException|IOException e)
+{
+// Immediately abort the deletion, rolling back the transaction
+ourContext.abort();
+// Pass exception upwards for additional reporting/handling as needed
+throw e;
+}
 }
 
 /**
-* Remove a subcommunity. Any substructure then orphaned is deleted.
+* Remove a subcommunity. If it only belongs to one parent community,
+* then it is permanently deleted. If it has more than one parent community,
+* it is simply unmapped from the current community.
 *
 * @param c
 * subcommunity to remove
+* @throws SQLException
+* @throws AuthorizeException
+* @throws IOException
 */
 public void removeSubcommunity(Community c) throws SQLException,
 AuthorizeException, IOException
 {
-// Check authorisation
+// Check authorisation.
 AuthorizeManager.authorizeAction(ourContext, this, Constants.REMOVE);
 
-// will be the subcommunity an orphan?
-TableRow trow = DatabaseManager.querySingle(ourContext,
-"SELECT COUNT(DISTINCT parent_comm_id) AS num FROM community2community WHERE child_comm_id= ? ",
-c.getID());
-
-DatabaseManager.setConstraintDeferred(ourContext, "com2com_child_fk");
-if (trow.getLongColumn("num") == 1)
+// Do the removal in a try/catch, so that we can rollback on error
+try
 {
-// Orphan; delete it
-c.rawDelete();
+// Capture ID & Handle of Community we are removing, so we can trigger events later
+int removedId = c.getID();
+String removedHandle = c.getHandle();
+
+// How many parent(s) does this subcommunity have?
+TableRow trow = DatabaseManager.querySingle(ourContext,
+"SELECT COUNT(DISTINCT parent_comm_id) AS num FROM community2community WHERE child_comm_id= ? ",
+c.getID());
+long numParents = trow.getLongColumn("num");
+
+// Remove the parent/child mapping with this subcommunity
+// We do this before deletion, so that the deletion doesn't throw database integrity violations
+DatabaseManager.updateQuery(ourContext,
+"DELETE FROM community2community WHERE parent_comm_id= ? " +
+" AND child_comm_id= ? ", getID(),c.getID());
+
+// As long as this Community only had one parent, delete it
+// NOTE: if it had multiple parents, we will keep it around,
+// and just remove that single parent/child mapping
+if (numParents == 1)
+{
+c.rawDelete();
+}
+
+// log the removal & trigger any related event(s)
+log.info(LogManager.getHeader(ourContext, "remove_subcommunity",
+"parent_comm_id=" + getID() + ",child_comm_id=" + removedId));
+
+ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COMMUNITY, removedId, removedHandle));
+}
+catch(SQLException|IOException e)
+{
+// Immediately abort the deletion, rolling back the transaction
+ourContext.abort();
+// Pass exception upwards for additional reporting/handling as needed
+throw e;
 }
-
-log.info(LogManager.getHeader(ourContext, "remove_subcommunity",
-"parent_comm_id=" + getID() + ",child_comm_id=" + c.getID()));
-
-// Remove any mappings
-DatabaseManager.updateQuery(ourContext,
-"DELETE FROM community2community WHERE parent_comm_id= ? " +
-" AND child_comm_id= ? ", getID(),c.getID());
-
-ourContext.addEvent(new Event(Event.REMOVE, Constants.COMMUNITY, getID(), Constants.COMMUNITY, c.getID(), c.getHandle()));
-
-DatabaseManager.setConstraintImmediate(ourContext, "com2com_child_fk");
 }
 
 /**
-* Delete the community, including the metadata and logo. Collections and
-* subcommunities that are then orphans are deleted.
+* Delete the community, including the metadata and logo. Any children
+* (subcommunities or collections) are also deleted.
+*
+* @throws SQLException
+* @throws AuthorizeException
+* @throws IOException
 */
 public void delete() throws SQLException, AuthorizeException, IOException
 {
-// Check authorisation
-// FIXME: If this was a subcommunity, it is first removed from it's
-// parent.
-// This means the parentCommunity == null
-// But since this is also the case for top-level communities, we would
-// give everyone rights to remove the top-level communities.
-// The same problem occurs in removing the logo
-if (!AuthorizeManager.authorizeActionBoolean(ourContext,
-getParentCommunity(), Constants.REMOVE))
+// Check for a parent Community
+Community parent = getParentCommunity();
+
+// Check authorisation.
+// MUST have either REMOVE permissions on parent community (if exists)
+// OR have DELETE permissions on current community
+if (parent!= null && !AuthorizeManager.authorizeActionBoolean(ourContext,
+parent, Constants.REMOVE))
 {
+// If we don't have Parent Community REMOVE permissions, then
+// we MUST at least have current Community DELETE permissions
 AuthorizeManager
 .authorizeAction(ourContext, this, Constants.DELETE);
 }
 
-// If not a top-level community, have parent remove me; this
-// will call rawDelete() before removing the linkage
-Community parent = getParentCommunity();
-
+// Check if this is a top-level Community or not
 if (parent == null)
 {
-// if removing a top level Community, simulate a REMOVE event at the Site.
-if (getParentCommunity() == null)
-{
-ourContext.addEvent(new Event(Event.REMOVE, Constants.SITE, Site.SITE_ID,
-Constants.COMMUNITY, getID(), getHandle()));
-}
+// Call rawDelete to clean up all sub-communities & collections
+// under this Community, then delete the Community itself
+rawDelete();
+
+// Since this is a top level Community, simulate a REMOVE event at the Site.
+ourContext.addEvent(new Event(Event.REMOVE, Constants.SITE, Site.SITE_ID,
+Constants.COMMUNITY, getID(), getHandle()));
 } else {
-// remove the subcommunities first
-Community[] subcommunities = getSubcommunities();
-for (int i = 0; i < subcommunities.length; i++)
-{
-subcommunities[i].delete();
-}
-// now let the parent remove the community
+// This is a subcommunity, so let the parent remove it
+// NOTE: this essentially just logs event and calls "rawDelete()"
 parent.removeSubcommunity(this);
-
-return;
 }
-
-rawDelete();
 }
 
 /**
-* Internal method to remove the community and all its childs from the database without aware of eventually parent
+* Internal method to remove the community and all its children from the
+* database, and perform any pre/post-cleanup
+*
+* @throws SQLException
+* @throws AuthorizeException
+* @throws IOException
 */
 private void rawDelete() throws SQLException, AuthorizeException, IOException
 {
 log.info(LogManager.getHeader(ourContext, "delete_community",
 "community_id=" + getID()));
 
-ourContext.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, getID(), getHandle()));
+// Capture ID & Handle of object we are removing, so we can trigger events later
+int deletedId = getID();
+String deletedHandle = getHandle();
 
-// Remove from cache
+// Remove Community object from cache
 ourContext.removeCached(this, getID());
 
-// Remove collections
-Collection[] cols = getCollections();
-
-for (int i = 0; i < cols.length; i++)
+// Remove any collections directly under this Community
+for (Collection collection : getCollections())
 {
-removeCollection(cols[i]);
+removeCollection(collection);
 }
 
-// delete subcommunities
-Community[] comms = getSubcommunities();
-
-for (int j = 0; j < comms.length; j++)
+// Remove any SubCommunities under this Community
+for (Community community : getSubcommunities())
 {
-comms[j].delete();
+removeSubcommunity(community);
 }
 
-// Remove the logo
+// Remove the Community logo
 setLogo(null);
 
-// Remove all authorization policies
+// Remove all associated authorization policies
 AuthorizeManager.removeAllPolicies(ourContext, this);
 
 // get rid of the content count cache if it exists
@@ -1163,19 +1204,22 @@ public class Community extends DSpaceObject
 throw new IllegalStateException(e.getMessage(),e);
 }
 
-// Remove any Handle
+// Unbind any handle associated with this Community
 HandleManager.unbindHandle(ourContext, this);
 
-// Delete community row
+// Delete community row (which actually removes the Community)
 DatabaseManager.delete(ourContext, communityRow);
 
-// Remove administrators group - must happen after deleting community
+// Remove Community administrators group (if exists)
+// NOTE: this must happen AFTER deleting community
 Group g = getAdministrators();
 
 if (g != null)
 {
 g.delete();
 }
 
+// If everything above worked, then trigger any associated events
+ourContext.addEvent(new Event(Event.DELETE, Constants.COMMUNITY, deletedId, deletedHandle));
 }
 
 /**
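The reworked removal logic above replaces the old "orphan" check with an explicit parent count: the mapping row is always deleted, but the child object itself is only deleted when it had a single parent. A minimal sketch of the resulting behaviour, assuming the pre-service-API DSpace calls used elsewhere in this codebase (Community.create(), createCollection() and addCollection() are assumptions, not shown in this PR):

    // Sketch only: illustrates the "unmap vs. delete" behaviour described in the
    // new removeCollection() Javadoc. Calls other than removeCollection() are assumptions.
    void removeCollectionBehaviourSketch(org.dspace.core.Context context) throws Exception
    {
        Community parentA = Community.create(null, context);
        Community parentB = Community.create(null, context);

        Collection col = parentA.createCollection(); // one parent: parentA
        parentB.addCollection(col);                  // now mapped to two parent communities

        parentB.removeCollection(col); // two parents -> only the parentB mapping is removed
        parentA.removeCollection(col); // last remaining parent -> the collection itself is deleted
    }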
@@ -49,7 +49,7 @@ import org.dspace.core.Context;
 public class DatabaseManager
 {
 /** log4j category */
-private static Logger log = Logger.getLogger(DatabaseManager.class);
+private static final Logger log = Logger.getLogger(DatabaseManager.class);
 
 /** True if initialization has been done */
 private static boolean initialized = false;

@@ -1077,6 +1077,7 @@ public class DatabaseManager
 
 switch (jdbctype)
 {
+case Types.BOOLEAN:
 case Types.BIT:
 row.setColumn(name, results.getBoolean(i));
 break;

@@ -1581,6 +1582,7 @@ public class DatabaseManager
 {
 switch (jdbctype)
 {
+case Types.BOOLEAN:
 case Types.BIT:
 statement.setBoolean(count, row.getBooleanColumn(column));
 break;
@@ -1,8 +1,10 @@
-#Configure authority control for testing
+# Configure authority control for Unit Testing
+# (This overrides default, commented out settings in dspace.cfg)
 plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
-org.dspace.content.authority.DCInputAuthority
+org.dspace.content.authority.DCInputAuthority, \
+org.dspace.content.authority.DSpaceControlledVocabulary
 
+# Enable a test authority control on dc.language.iso field
 choices.plugin.dc.language.iso = common_iso_languages
 choices.presentation.dc.language.iso = select
 authority.controlled.dc.language.iso = true
@@ -7,39 +7,40 @@
 */
 package org.dspace;
 
+import java.io.BufferedReader;
 import static org.junit.Assert.fail;
 
 import java.io.File;
+import java.io.FileReader;
 import java.io.IOException;
 import java.net.URL;
+import java.sql.Connection;
 import java.sql.SQLException;
+import java.sql.Statement;
 import java.util.Properties;
 import java.util.TimeZone;
 
 import javax.xml.parsers.ParserConfigurationException;
 import javax.xml.transform.TransformerException;
 
-import mockit.UsingMocksAndStubs;
-
 import org.apache.log4j.Logger;
 import org.dspace.administer.MetadataImporter;
 import org.dspace.administer.RegistryImportException;
 import org.dspace.administer.RegistryLoader;
 import org.dspace.authorize.AuthorizeException;
-import org.dspace.browse.BrowseException;
-import org.dspace.browse.IndexBrowse;
-import org.dspace.browse.MockBrowseCreateDAOOracle;
 import org.dspace.content.MetadataField;
 import org.dspace.content.NonUniqueMetadataException;
 import org.dspace.core.ConfigurationManager;
 import org.dspace.core.Context;
 import org.dspace.core.I18nUtil;
+import org.dspace.discovery.IndexingService;
 import org.dspace.discovery.MockIndexEventConsumer;
 import org.dspace.eperson.EPerson;
-import org.dspace.search.DSIndexer;
 import org.dspace.servicemanager.DSpaceKernelImpl;
 import org.dspace.servicemanager.DSpaceKernelInit;
+import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.MockDatabaseManager;
+import org.dspace.utils.DSpace;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;

@@ -54,7 +55,6 @@ import org.xml.sax.SAXException;
 *
 * @author pvillega
 */
-@UsingMocksAndStubs({MockDatabaseManager.class, MockBrowseCreateDAOOracle.class, MockIndexEventConsumer.class})
 public class AbstractUnitTest
 {
 /** log4j category */
@@ -80,8 +80,11 @@ public class AbstractUnitTest
 protected static EPerson eperson;
 
 protected static DSpaceKernelImpl kernelImpl;
 
+// Whether the in-memory DB has been initiailzed for testing
+protected static boolean dbInitialized = false;
+
 /**
 * This method will be run before the first test as per @BeforeClass. It will
 * initialize resources required for the tests.
 *

@@ -119,9 +122,14 @@ public class AbstractUnitTest
 {
 kernelImpl.start(ConfigurationManager.getProperty("dspace.dir"));
 }
 
-// Start the mock database
+// Applies/initializes our mock database by invoking its constructor:
 new MockDatabaseManager();
+// Now, initialize our Database itself by populating it
+initializeDB();
+
+// Also initialize these mock classes for general use
+new MockIndexEventConsumer();
 
 // Load the default registries. This assumes the temporary
 // filesystem is working and the in-memory DB in place.

@@ -156,15 +164,10 @@ public class AbstractUnitTest
 }
 
 //Create search and browse indexes
-DSIndexer.cleanIndex(ctx);
-DSIndexer.createIndex(ctx);
+DSpace dspace = new DSpace();
+IndexingService indexer = dspace.getServiceManager().getServiceByName(IndexingService.class.getName(),IndexingService.class);
+indexer.createIndex(ctx);
 ctx.commit();
 
-//indexer does a 'complete' on the context
-IndexBrowse indexer = new IndexBrowse(ctx);
-indexer.setRebuild(true);
-indexer.setExecute(true);
-indexer.initBrowse();
 }
 ctx.restoreAuthSystemState();
 if(ctx.isValid())

@@ -173,11 +176,6 @@ public class AbstractUnitTest
 }
 ctx = null;
 }
-catch (BrowseException ex)
-{
-log.error("Error creating the browse indexes", ex);
-fail("Error creating the browse indexes");
-}
 catch (RegistryImportException ex)
 {
 log.error("Error loading default data", ex);
@@ -398,4 +396,88 @@ public class AbstractUnitTest
 throw new Exception("Fail!");
 }
 */
 
+/**
+* Create the database tables by running the schema SQL specified
+* in the 'db.schema.path' system property
+*/
+private static void initializeDB() throws SQLException
+{
+if(!dbInitialized)
+{
+String schemaPath = "";
+Connection dbConnection = null;
+try
+{
+//preload the contents of the database
+String s = new String();
+StringBuilder sb = new StringBuilder();
+
+schemaPath = System.getProperty("db.schema.path");
+if (schemaPath == null)
+throw new IllegalArgumentException(
+"System property db.schema.path must be defined");
+
+log.debug("Preloading Unit Test database from " + schemaPath);
+
+FileReader fr = new FileReader(new File(schemaPath));
+BufferedReader br = new BufferedReader(fr);
+
+while((s = br.readLine()) != null)
+{
+//we skip white lines and comments
+if(!"".equals(s.trim()) && !s.trim().startsWith("--"))
+{
+sb.append(s);
+}
+}
+br.close();
+
+//we use ";" as a delimiter for each request. This assumes no triggers
+//nor other calls besides CREATE TABLE, CREATE SEQUENCE and INSERT
+//exist in the file
+String[] stmts = sb.toString().split(";");
+
+// Get a new database connection. This also initializes underlying DatabaseManager object
+dbConnection = DatabaseManager.getConnection();
+Statement st = dbConnection.createStatement();
+
+for(int i = 0; i<stmts.length; i++)
+{
+// we ensure that there is no spaces before or after the request string
+// in order to not execute empty statements
+if(!stmts[i].trim().equals(""))
+{
+st.executeUpdate(stmts[i]);
+log.debug("Loading into database: "+stmts[i]);
+}
+}
+
+// Commit all DB changes & mark DB as initialized
+dbConnection.commit();
+dbInitialized = true;
+}
+catch (IOException e)
+{
+// Re-throw as a SQL exception... but note that it's a problem with the Schema file
+throw new SQLException("Unable to create test database from file '" + schemaPath + "'", e);
+}
+finally
+{
+if (dbConnection!=null)
+{
+try
+{
+// close out our open DB connection
+dbConnection.close();
+}
+catch(SQLException se)
+{
+//ignore if we cannot close
+}
+dbConnection = null;
+}
+}
+}
+}
 }
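The new initializeDB() helper above simply streams the schema file named by the db.schema.path system property into the test database, one semicolon-delimited statement at a time. A self-contained sketch of the same idea against a plain JDBC in-memory H2 database (the JDBC URL and driver choice here are assumptions for illustration only; the real tests obtain their connection from DatabaseManager/MockDatabaseManager):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.Statement;

    public class SchemaPreloadSketch
    {
        public static void main(String[] args) throws Exception
        {
            // Same contract as initializeDB(): the schema file is named by a system property
            String schemaPath = System.getProperty("db.schema.path");

            try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:dspacetest");
                 BufferedReader br = new BufferedReader(new FileReader(schemaPath));
                 Statement st = conn.createStatement())
            {
                conn.setAutoCommit(false);

                // Concatenate the schema, skipping blank lines and "--" comments, as the test harness does
                StringBuilder sb = new StringBuilder();
                String line;
                while ((line = br.readLine()) != null)
                {
                    if (!line.trim().isEmpty() && !line.trim().startsWith("--"))
                    {
                        sb.append(line);
                    }
                }

                // ';' delimits the individual CREATE/INSERT statements
                for (String stmt : sb.toString().split(";"))
                {
                    if (!stmt.trim().isEmpty())
                    {
                        st.executeUpdate(stmt);
                    }
                }
                conn.commit();
            }
        }
    }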
@@ -1,56 +0,0 @@
-/**
-* The contents of this file are subject to the license and copyright
-* detailed in the LICENSE and NOTICE files at the root of the source
-* tree and available online at
-*
-* http://www.dspace.org/license/
-*/
-
-package org.dspace;
-
-import java.util.Properties;
-import mockit.Mock;
-import mockit.MockUp;
-import org.dspace.core.ConfigurationManager;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
-* Dummy ConfigurationManager with a setter instead of external storage for
-* values. Call {@link setProperty} to create configuration.
-*
-* <p>Please note that this implementation is incomplete!</p>
-*
-* @author mwood
-*/
-public class MockConfigurationManager
-extends MockUp<ConfigurationManager>
-{
-private static final Properties properties = new Properties();
-private static final Logger log = LoggerFactory.getLogger(MockConfigurationManager.class);
-
-/**
-* Set a value in the configuration map.
-*
-* @param key name of the configuration datum.
-* @param value value to be assigned to the name.
-*/
-public static void setProperty(String key, String value)
-{
-log.info("setProperty({}, {});", key, value);
-properties.setProperty(key, value);
-}
-
-/**
-* Fetch a value from the map.
-*
-* @param key name of the configuration property desired.
-* @return value bound to that name, or null if not set.
-*/
-@Mock
-public static String getProperty(String key)
-{
-log.info("getProperty({});", key);
-return properties.getProperty(key);
-}
-}
@@ -12,7 +12,6 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.dspace.AbstractUnitTest;
-import org.dspace.core.Context;
 
 import org.junit.*;
 import static org.junit.Assert.* ;

@@ -31,34 +30,6 @@ public class DSpaceCSVTest extends AbstractUnitTest
 /** log4j category */
 private static final Logger log = Logger.getLogger(DSpaceCSVTest.class);
 
-/**
-* This method will be run before every test as per @Before. It will
-* initialize resources required for the tests.
-*
-* Other methods can be annotated with @Before here or in subclasses
-* but no execution order is guaranteed
-*/
-@Before
-@Override
-public void init()
-{
-super.init();
-}
-
-/**
-* This method will be run after every test as per @After. It will
-* clean resources initialized by the @Before methods.
-*
-* Other methods can be annotated with @After here or in subclasses
-* but no execution order is guaranteed
-*/
-@After
-@Override
-public void destroy()
-{
-super.destroy();
-}
-
 /**
 * Test the reading and parsing of CSV files
 */

@@ -85,9 +56,9 @@ public class DSpaceCSVTest extends AbstractUnitTest
 }
 out.flush();
 out.close();
 
 // Test the CSV parsing was OK
-Context c = new Context();
-DSpaceCSV dcsv = new DSpaceCSV(new File(filename), c);
+DSpaceCSV dcsv = new DSpaceCSV(new File(filename), context);
 String[] lines = dcsv.getCSVLinesAsStringArray();
 assertThat("testDSpaceCSV Good CSV", lines.length, equalTo(7));
 

@@ -111,16 +82,18 @@ public class DSpaceCSVTest extends AbstractUnitTest
 }
 out.flush();
 out.close();
 
 // Test the CSV parsing was OK
 try
 {
-dcsv = new DSpaceCSV(new File(filename), c);
+dcsv = new DSpaceCSV(new File(filename), context);
 lines = dcsv.getCSVLinesAsStringArray();
 
 fail("An exception should have been thrown due to bad CSV");
-} catch (Exception e)
+}
+catch (Exception e)
 {
 assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata element in row 4: dc.contributor.foobar"));
 }
 lines = dcsv.getCSVLinesAsStringArray();
 assertThat("testDSpaceCSV Good CSV", lines.length, equalTo(7));

@@ -138,14 +111,16 @@ public class DSpaceCSVTest extends AbstractUnitTest
 }
 out.flush();
 out.close();
 
 // Test the CSV parsing was OK
 try
 {
-dcsv = new DSpaceCSV(new File(filename), c);
+dcsv = new DSpaceCSV(new File(filename), context);
 lines = dcsv.getCSVLinesAsStringArray();
 
 fail("An exception should have been thrown due to bad CSV");
-} catch (Exception e)
+}
+catch (Exception e)
 {
 assertThat("testDSpaceCSV Bad heading CSV", e.getMessage(), equalTo("Unknown metadata schema in row 3: dcdc.title"));
 }
@@ -25,7 +25,7 @@ import static org.junit.Assert.assertTrue;
 * @author Ben Bosman
 * @author Roeland Dillen
 */
-public class IPMatcherTest extends AbstractUnitTest
+public class IPMatcherTest
 {
 private static final String IP6_FULL_ADDRESS1 = "2001:18e8:3:171:218:8bff:fe2a:56a4";
 private static final String IP6_FULL_ADDRESS2 = "2001:18e8:3:171:218:8bff:fe2a:56a3";
(File diff suppressed because it is too large.)
@@ -194,13 +194,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test
 public void testCreateAdmin() throws SQLException,AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = true;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = true;
+}};
 
 BitstreamFormat found = BitstreamFormat.create(context);
 assertThat("testCreate 0", found, notNullValue());
@@ -216,13 +214,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test(expected=AuthorizeException.class)
 public void testCreateNotAdmin() throws SQLException,AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = false;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Disallow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = false;
+}};
 
 BitstreamFormat found = BitstreamFormat.create(context);
 fail("Exception should have been thrown");

@@ -451,14 +447,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test(expected=AuthorizeException.class)
 public void testUpdateNotAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = false;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Disallow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = false;
+}};
 
 bf.update();
 fail("Exception should have been thrown");

@@ -470,14 +463,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test
 public void testUpdateAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = true;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = true;
+}};
 
 String desc = "Test description";
 bf.setDescription(desc);

@@ -493,14 +483,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test(expected=AuthorizeException.class)
 public void testDeleteNotAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = false;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Disallow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = false;
+}};
 
 bf.delete();
 fail("Exception should have been thrown");

@@ -512,15 +499,11 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test
 public void testDeleteAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-BitstreamFormat unknown;
-{
-AuthorizeManager.isAdmin((Context)any); result = true;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = true;
+}};
 
 bf.delete();
 BitstreamFormat b = BitstreamFormat.find(context, 5);

@@ -533,15 +516,12 @@ public class BitstreamFormatTest extends AbstractUnitTest
 @Test(expected=IllegalArgumentException.class)
 public void testDeleteUnknown() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.isAdmin((Context)any); result = true;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow full Admin perms
+AuthorizeManager.isAdmin((Context)any); result = true;
+}};
 
 bunknown.delete();
 fail("Exception should have been thrown");
 }

@@ -561,7 +541,7 @@ public class BitstreamFormatTest extends AbstractUnitTest
 * Test of setExtensions method, of class BitstreamFormat.
 */
 @Test
-public void setExtensions(String[] exts)
+public void setExtensions()
 {
 assertThat("setExtensions 0", bf.getExtensions()[0], equalTo("xml"));
 
@@ -88,7 +88,7 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 @Test
 public void testBSFind() throws SQLException
 {
-int id = 1;
+int id = this.bs.getID();
 Bitstream found = Bitstream.find(context, id);
 assertThat("testBSFind 0", found, notNullValue());
 //the item created by default has no name nor type set

@@ -291,7 +291,7 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 @Test
 public void testGetSize()
 {
-long size = 238413;
+long size = 238413; // yuck, hardcoded!
 assertThat("testGetSize 0", bs.getSize(), equalTo(size));
 }
 

@@ -367,15 +367,13 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testUpdateNotAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
-Constants.WRITE); result = new AuthorizeException();
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Disallow Bitstream WRITE perms
+AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
+Constants.WRITE); result = new AuthorizeException();
+}};
 //TODO: we need to verify the update, how?
 bs.update();
 }

@@ -386,15 +384,14 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 @Test
 public void testUpdateAdmin() throws SQLException, AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
-Constants.WRITE); result = null;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow Bitstream WRITE perms
+AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
+Constants.WRITE); result = null;
+}};
 //TODO: we need to verify the update, how?
 bs.update();
 }

@@ -403,24 +400,18 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 * Test of delete method, of class Bitstream.
 */
 @Test
-public void testDelete() throws SQLException, AuthorizeException
+public void testDelete() throws IOException, SQLException
 {
-bs.delete();
-assertTrue("testDelete 0", bs.isDeleted());
+// Create a new bitstream, which we can delete. As ordering of these
+// tests is unpredictable we don't want to delete the global bitstream
+File f = new File(testProps.get("test.bitstream").toString());
+Bitstream delBS = Bitstream.create(context, new FileInputStream(f));
+
+assertFalse("testIsDeleted 0", delBS.isDeleted());
+delBS.delete();
+assertTrue("testDelete 0", delBS.isDeleted());
 }
 
-/**
-* Test of isDeleted method, of class Bitstream.
-*/
-@Test
-public void testIsDeleted() throws SQLException, AuthorizeException
-{
-assertFalse("testIsDeleted 0", bs.isDeleted());
-bs.delete();
-assertTrue("testIsDeleted 1", bs.isDeleted());
-}
-
 /**
 * Test of retrieve method, of class Bitstream.
 */

@@ -428,14 +419,12 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 public void testRetrieveCanRead() throws IOException, SQLException,
 AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
-Constants.READ); result = null;
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Allow Bitstream READ perms
+AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
+Constants.READ); result = null;
+}};
 
 assertThat("testRetrieveCanRead 0", bs.retrieve(), notNullValue());
 }

@@ -447,14 +436,12 @@ public class BitstreamTest extends AbstractDSpaceObjectTest
 public void testRetrieveNoRead() throws IOException, SQLException,
 AuthorizeException
 {
-new NonStrictExpectations()
-{
-AuthorizeManager authManager;
-{
-AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
-Constants.READ); result = new AuthorizeException();
-}
-};
+new NonStrictExpectations(AuthorizeManager.class)
+{{
+// Disallow Bitstream READ perms
+AuthorizeManager.authorizeAction((Context) any, (Bitstream) any,
+Constants.READ); result = new AuthorizeException();
+}};
 
 assertThat("testRetrieveNoRead 0", bs.retrieve(), notNullValue());
 }
@@ -195,14 +195,13 @@ public class BundleTest extends AbstractDSpaceObjectTest
|
|||||||
@Test
|
@Test
|
||||||
public void testGetBitstreamByName() throws FileNotFoundException, SQLException, IOException, AuthorizeException
|
public void testGetBitstreamByName() throws FileNotFoundException, SQLException, IOException, AuthorizeException
|
||||||
{
|
{
|
||||||
new NonStrictExpectations()
|
new NonStrictExpectations(AuthorizeManager.class)
|
||||||
{
|
{{
|
||||||
AuthorizeManager authManager;
|
// Allow Bundle ADD perms
|
||||||
{
|
AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
|
||||||
AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
|
Constants.ADD); result = null;
|
||||||
Constants.ADD); result = null;
|
|
||||||
}
|
}};
|
||||||
};
|
|
||||||
|
|
||||||
String name = "name";
|
String name = "name";
|
||||||
//by default there is no bitstream
|
//by default there is no bitstream
|
||||||
@@ -225,14 +224,13 @@ public class BundleTest extends AbstractDSpaceObjectTest
|
|||||||
@Test
|
@Test
|
||||||
public void testGetBitstreams() throws FileNotFoundException, SQLException, IOException, AuthorizeException
|
public void testGetBitstreams() throws FileNotFoundException, SQLException, IOException, AuthorizeException
|
||||||
{
|
{
|
||||||
new NonStrictExpectations()
|
new NonStrictExpectations(AuthorizeManager.class)
|
||||||
{
|
{{
|
||||||
AuthorizeManager authManager;
|
// Allow Bundle ADD perms
|
||||||
{
|
AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
|
||||||
AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
|
Constants.ADD); result = null;
|
||||||
Constants.ADD); result = null;
|
|
||||||
}
|
}};
|
||||||
};
|
|
||||||
|
|
||||||
//default bundle has no bitstreams
|
//default bundle has no bitstreams
|
||||||
assertThat("testGetBitstreams 0", b.getBitstreams(), notNullValue());
|
assertThat("testGetBitstreams 0", b.getBitstreams(), notNullValue());
|
||||||
@@ -267,16 +265,14 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testCreateBitstreamNoAuth() throws FileNotFoundException, AuthorizeException, SQLException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
-                Constants.ADD); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

-    String name = "name";
     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream bs = b.createBitstream(new FileInputStream(f));
     fail("Exception should be thrown");
@@ -288,14 +284,13 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test
 public void testCreateBitstreamAuth() throws FileNotFoundException, AuthorizeException, SQLException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = null;
+    }};

     String name = "name";
     File f = new File(testProps.get("test.bitstream").toString());
@@ -312,17 +307,15 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testRegisterBitstreamNoAuth() throws AuthorizeException, IOException, SQLException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.ADD);
-            result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

-    int assetstore = 0;
+    int assetstore = 0; //default assetstore
     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream bs = b.registerBitstream(assetstore, f.getAbsolutePath());
     fail("Exception should be thrown");
@@ -334,17 +327,15 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test
 public void testRegisterBitstreamAuth() throws AuthorizeException, IOException, SQLException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.ADD);
-            result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = null;
+    }};

-    int assetstore = 0;
+    int assetstore = 0; //default assetstore
     String name = "name bitstream";
     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream bs = b.registerBitstream(assetstore, f.getName());
@@ -358,19 +349,20 @@ public class BundleTest extends AbstractDSpaceObjectTest
  * Test of addBitstream method, of class Bundle.
  */
 @Test(expected=AuthorizeException.class)
-public void testAddBitstreamNoAuth() throws SQLException, AuthorizeException
+public void testAddBitstreamNoAuth() throws SQLException, AuthorizeException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.ADD);
-            result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

-    int id = 1;
-    Bitstream bs = Bitstream.find(context, id);
+    // create a new Bitstream to add to Bundle
+    File f = new File(testProps.get("test.bitstream").toString());
+    Bitstream bs = Bitstream.create(context, new FileInputStream(f));
+    bs.setName("name");
     b.addBitstream(bs);
     fail("Exception should have been thrown");
 }
@@ -381,16 +373,14 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test
 public void testAddBitstreamAuth() throws SQLException, AuthorizeException, FileNotFoundException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.ADD);
-            result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Bundle ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = null;
+    }};

     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream bs = Bitstream.create(context, new FileInputStream(f));
     bs.setName("name");
@@ -406,14 +396,13 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testRemoveBitstreamNoAuth() throws SQLException, AuthorizeException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.REMOVE);
-            result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Bundle REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.REMOVE); result = new AuthorizeException();
+    }};

     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream bs = Bitstream.create(context, new FileInputStream(f));
@@ -428,18 +417,19 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test
 public void testRemoveBitstreamAuth() throws SQLException, AuthorizeException, IOException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Bundle) any, Constants.REMOVE);
-            result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Bundle ADD perms (to create a new Bitstream and add it)
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = null;
+        // Allow Bundle REMOVE perms (to test remove)
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.REMOVE); result = null;
+    }};

-    int id = 1;
+    // Create a new Bitstream to test with
     File f = new File(testProps.get("test.bitstream").toString());
-    Bitstream bs = Bitstream.find(context, id);
+    Bitstream bs = Bitstream.create(context, new FileInputStream(f));
     b.addBitstream(bs);
     context.commit();
     b.removeBitstream(bs);
@@ -464,9 +454,29 @@ public class BundleTest extends AbstractDSpaceObjectTest
 @Test
 public void testDelete() throws SQLException, AuthorizeException, IOException
 {
-    int id = b.getID();
-    b.delete();
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Bundle ADD perms (to create a new Bitstream and add it)
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.ADD); result = null;
+        // Allow Bundle REMOVE perms (to test remove)
+        AuthorizeManager.authorizeAction((Context) any, (Bundle) any,
+            Constants.REMOVE); result = null;
+    }};
+
+    // Create a new Bundle to be deleted
+    Bundle created = Bundle.create(context);
+    //let's add a bitstream
+    File f = new File(testProps.get("test.bitstream").toString());
+    Bitstream bs = Bitstream.create(context, new FileInputStream(f));
+    created.addBitstream(bs);
+    // Ensure both are saved to context
     context.commit();
+
+    // Now, delete the newly added Bundle and Bitstream
+    int id = created.getID();
+    created.delete();
+    // Bundle should not exist anymore
     assertThat("testDelete 0", Bundle.find(context, id), nullValue());
 }

File diff suppressed because it is too large
File diff suppressed because it is too large
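Note on the refactoring pattern above and below: every rewritten test replaces an anonymous NonStrictExpectations subclass with a declared mock field by passing the class to be faked into the constructor, which turns it into a JMockit partial mock where only the recorded invocations are stubbed. A minimal standalone sketch of that idiom follows; it is illustrative only and not part of the commit, and the class and method names in it are invented for the example.

import mockit.NonStrictExpectations;

import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.AuthorizeManager;
import org.dspace.content.Item;
import org.dspace.core.Constants;
import org.dspace.core.Context;

// Illustrative sketch of the partial-mock idiom these tests switch to.
public class PartialMockSketch
{
    void denyItemWrite() throws Exception
    {
        // Passing AuthorizeManager.class partially mocks the class: only the
        // invocation recorded below is faked; all other AuthorizeManager
        // methods keep their real behaviour during the test.
        new NonStrictExpectations(AuthorizeManager.class)
        {{
            AuthorizeManager.authorizeAction((Context) any, (Item) any,
                Constants.WRITE); result = new AuthorizeException();
        }};
    }
}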
@@ -15,13 +15,13 @@ import org.dspace.core.Context;

 import java.io.FileInputStream;
 import java.io.File;
-import java.sql.SQLException;

 import org.dspace.AbstractUnitTest;
 import org.apache.log4j.Logger;
 import org.junit.*;
 import static org.junit.Assert.* ;
 import static org.hamcrest.CoreMatchers.*;
+import org.junit.rules.ExpectedException;


 /**
@@ -34,33 +34,9 @@ public class InstallItemTest extends AbstractUnitTest
     /** log4j category */
     private static final Logger log = Logger.getLogger(InstallItemTest.class);

-    /**
-     * This method will be run before every test as per @Before. It will
-     * initialize resources required for the tests.
-     *
-     * Other methods can be annotated with @Before here or in subclasses
-     * but no execution order is guaranteed
-     */
-    @Before
-    @Override
-    public void init()
-    {
-        super.init();
-    }
-
-    /**
-     * This method will be run after every test as per @After. It will
-     * clean resources initialized by the @Before methods.
-     *
-     * Other methods can be annotated with @After here or in subclasses
-     * but no execution order is guaranteed
-     */
-    @After
-    @Override
-    public void destroy()
-    {
-        super.destroy();
-    }
+    /** Used to check/verify thrown exceptions in below tests **/
+    @Rule
+    public ExpectedException thrown = ExpectedException.none();

     /**
      * Test of installItem method, of class InstallItem.
@@ -99,19 +75,18 @@ public class InstallItemTest extends AbstractUnitTest
 /**
  * Test of installItem method (with an invalid handle), of class InstallItem.
  */
-@Test(expected=SQLException.class)
+@Test
 public void testInstallItem_invalidHandle() throws Exception
 {
     //Default to Full-Admin rights
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD); result = false;
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Deny Community ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD); result = false;
+        // Allow full Admin perms
         AuthorizeManager.isAdmin((Context) any); result = true;
-        }
-    };
+    }};

     String handle = "1345/567";
     Collection col = Collection.create(context);
@@ -119,9 +94,12 @@ public class InstallItemTest extends AbstractUnitTest
     WorkspaceItem is2 = WorkspaceItem.create(context, col, false);

     //Test assigning the same Handle to two different items
-    // this should throw an exception
-    Item result1 = InstallItem.installItem(context, is, handle);
-    Item result2 = InstallItem.installItem(context, is2, handle);
+    InstallItem.installItem(context, is, handle);
+
+    // Assigning the same handle again should throw a RuntimeException
+    thrown.expect(RuntimeException.class);
+    thrown.expectMessage("Error while attempting to create identifier");
+    InstallItem.installItem(context, is2, handle);
     fail("Exception expected");
 }
@@ -30,7 +30,6 @@ import mockit.*;
 import org.dspace.app.util.AuthorizeUtil;
 import org.dspace.authorize.AuthorizeManager;
 import org.dspace.authorize.ResourcePolicy;
-import org.dspace.content.authority.MetadataAuthorityManager;
 import org.dspace.core.Constants;

 /**
@@ -684,14 +683,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCreateBundleAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     String name = "bundle";
     Bundle created = it.createBundle(name);
@@ -707,14 +705,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=SQLException.class)
 public void testCreateBundleNoName() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     String name = "";
     Bundle created = it.createBundle(name);
@@ -727,14 +724,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=SQLException.class)
 public void testCreateBundleNoName2() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     String name = null;
     Bundle created = it.createBundle(name);
@@ -748,14 +744,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testCreateBundleNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

     String name = "bundle";
     Bundle created = it.createBundle(name);
@@ -768,14 +763,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testAddBundleAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     String name = "bundle";
     Bundle created = Bundle.create(context);
@@ -793,14 +787,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testAddBundleNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

     String name = "bundle";
     Bundle created = Bundle.create(context);
@@ -816,16 +809,14 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testRemoveBundleAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD and REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE, true); result = null;
+    }};

     String name = "bundle";
     Bundle created = Bundle.create(context);
@@ -843,16 +834,15 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testRemoveBundleNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = null;
+        // Disallow Item REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE); result = new AuthorizeException();
+    }};

     String name = "bundle";
     Bundle created = Bundle.create(context);
@@ -869,14 +859,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCreateSingleBitstream_InputStream_StringAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     String name = "new bundle";
     File f = new File(testProps.get("test.bitstream").toString());
@@ -890,14 +879,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testCreateSingleBitstream_InputStream_StringNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

     String name = "new bundle";
     File f = new File(testProps.get("test.bitstream").toString());
@@ -911,14 +899,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCreateSingleBitstream_InputStreamAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+    }};

     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream result = it.createSingleBitstream(new FileInputStream(f));
@@ -931,14 +918,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testCreateSingleBitstream_InputStreamNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = new AuthorizeException();
+    }};

     File f = new File(testProps.get("test.bitstream").toString());
     Bitstream result = it.createSingleBitstream(new FileInputStream(f));
@@ -961,16 +947,14 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testRemoveDSpaceLicenseAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD and REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD, true); result = null;
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE, true); result = null;
+    }};

     String name = "LICENSE";
     Bundle created = Bundle.create(context);
@@ -988,16 +972,15 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testRemoveDSpaceLicenseNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = null;
+        // Disallow Item REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE); result = new AuthorizeException();
+    }};

     String name = "LICENSE";
     Bundle created = Bundle.create(context);
@@ -1014,16 +997,14 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testRemoveLicensesAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD and REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = null;
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE); result = null;
+    }};

     String name = "LICENSE";
     Bundle created = Bundle.create(context);
@@ -1049,16 +1030,15 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testRemoveLicensesNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.ADD); result = null;
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.ADD); result = null;
+        // Disallow Item REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE); result = new AuthorizeException();
+    }};

     String name = "LICENSE";
     Bundle created = Bundle.create(context);
@@ -1082,14 +1062,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testUpdateAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.WRITE); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.WRITE); result = null;
+    }};

     //TOOD: how to test?
     it.update();
@@ -1101,21 +1080,22 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testUpdateAuth2() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.WRITE); result = null;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,true); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,true); result = false;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,true); result = new AuthorizeException();
-        }
-    };
+    // Test permission inheritence
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.WRITE); result = new AuthorizeException();
+        // Allow parent Community WRITE and ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,true); result = true;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,true); result = true;
+        // Disallow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,true); result = new AuthorizeException();
+    }};

     context.turnOffAuthorisationSystem();
     Collection c = Collection.create(context);
@@ -1132,20 +1112,21 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testUpdateNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.WRITE); result = new AuthorizeException();
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,anyBoolean); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,anyBoolean); result = false;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,anyBoolean); result = new AuthorizeException();
-        }
-    };
+    // Test permission inheritence
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.WRITE); result = new AuthorizeException();
+        // Disallow parent Community WRITE or ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,anyBoolean); result = false;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,anyBoolean); result = false;
+        // Disallow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,anyBoolean); result = new AuthorizeException();
+    }};

     context.turnOffAuthorisationSystem();
     Collection c = Collection.create(context);
@@ -1162,14 +1143,12 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testWithdrawAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeUtil authManager;
-        {
-            AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
-            result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeUtil.class)
+    {{
+        // Allow Item withdraw permissions
+        AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
+        result = null;
+    }};

     it.withdraw();
     assertTrue("testWithdrawAuth 0", it.isWithdrawn());
@@ -1181,14 +1160,13 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testWithdrawNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeUtil authManager;
-        {
-            AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
-            result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeUtil.class)
+    {{
+        // Disallow Item withdraw permissions
+        AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
+        result = new AuthorizeException();
+    }};

     it.withdraw();
     fail("Exception expected");
@@ -1200,16 +1178,14 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testReinstateAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeUtil authManager;
-        {
-            AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
-            result = null;
-            AuthorizeUtil.authorizeReinstateItem((Context) any, (Item) any);
-            result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeUtil.class)
+    {{
+        // Allow Item withdraw and reinstate permissions
+        AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
+        result = null;
+        AuthorizeUtil.authorizeReinstateItem((Context) any, (Item) any);
+        result = null;
+    }};

     it.withdraw();
     it.reinstate();
@@ -1222,20 +1198,19 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testReinstateNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeUtil authManager;
-        {
-            AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
-            result = null;
-            AuthorizeUtil.authorizeReinstateItem((Context) any, (Item) any);
-            result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeUtil.class)
+    {{
+        // Allow Item withdraw permissions
+        AuthorizeUtil.authorizeWithdrawItem((Context) any, (Item) any);
+        result = null;
+        // Disallow Item reinstate permissions
+        AuthorizeUtil.authorizeReinstateItem((Context) any, (Item) any);
+        result = new AuthorizeException();
+    }};

     it.withdraw();
     it.reinstate();
-    fail("Exceotion expected");
+    fail("Exception expected");
 }

 /**
@@ -1244,14 +1219,12 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testDeleteAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE, true); result = null;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE, true); result = null;
+    }};

     int id = it.getID();
     it.delete();
@@ -1265,14 +1238,12 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test(expected=AuthorizeException.class)
 public void testDeleteNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                Constants.REMOVE); result = new AuthorizeException();
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item REMOVE perms
+        AuthorizeManager.authorizeAction((Context) any, (Item) any,
+            Constants.REMOVE); result = new AuthorizeException();
+    }};

     it.delete();
     fail("Exception expected");
@@ -1297,14 +1268,12 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @SuppressWarnings("ObjectEqualsNull")
 public void testEquals() throws SQLException, AuthorizeException
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
-                Constants.ADD); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item ADD perms (needed to create an Item)
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.ADD); result = true;
+    }};

     assertFalse("testEquals 0",it.equals(null));
     assertFalse("testEquals 1",it.equals(Item.create(context)));
@@ -1544,20 +1513,21 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCanEditBooleanAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
-                Constants.WRITE); result = true;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,true); result = true;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,true); result = true;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,true); result = null;
-        }
-    };
+    // Test Inheritance of permissions
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow Item WRITE perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.WRITE); result = true;
+        // Allow parent Community WRITE and ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,true); result = true;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,true); result = true;
+        // Allow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,true); result = null;
+    }};

     assertTrue("testCanEditBooleanAuth 0", it.canEdit());
 }
@@ -1568,20 +1538,21 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCanEditBooleanAuth2() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
-                Constants.WRITE); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,true); result = true;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,true); result = true;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,true); result = null;
-        }
-    };
+    // Test Inheritance of permissions
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.WRITE); result = false;
+        // Allow parent Community WRITE and ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,true); result = true;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,true); result = true;
+        // Allow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,true); result = null;
+    }};

     assertTrue("testCanEditBooleanAuth2 0", it.canEdit());
 }
@@ -1592,26 +1563,50 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCanEditBooleanAuth3() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
-                Constants.WRITE); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,true); result = true;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,true); result = true;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,true); result = null;
-        }
-    };
+    // Test Inheritance of permissions for owning collection
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.WRITE); result = false;
+        // Allow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE, false); result = null;
+    }};

+    // Create a new Collection and assign it as the owner
     context.turnOffAuthorisationSystem();
     Collection c = Collection.create(context);
     it.setOwningCollection(c);
     context.restoreAuthSystemState();

+    // Ensure person with WRITE perms on the Collection can edit item
+    assertTrue("testCanEditBooleanAuth3 0", it.canEdit());
+}
+
+/**
+ * Test of canEditBoolean method, of class Collection.
+ */
+@Test
+public void testCanEditBooleanAuth4() throws Exception
+{
+    // Test Inheritance of permissions for Community Admins
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.WRITE); result = false;
+        // Allow parent Community WRITE and ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,true); result = true;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,true); result = true;
+        // Disallow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,true); result = new AuthorizeException();
+    }};
+
+    // Ensure person with WRITE perms on the Collection can edit item
     assertTrue("testCanEditBooleanAuth3 0", it.canEdit());
 }
@@ -1621,20 +1616,21 @@ public class ItemTest extends AbstractDSpaceObjectTest
 @Test
 public void testCanEditBooleanNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
-                Constants.WRITE); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.WRITE,anyBoolean); result = false;
-            AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
-                Constants.ADD,anyBoolean); result = false;
-            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                Constants.WRITE,anyBoolean); result = new AuthorizeException();
-        }
-    };
+    // Test Inheritance of permissions
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow Item WRITE perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Item) any,
+            Constants.WRITE); result = false;
+        // Disallow parent Community WRITE and ADD perms
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.WRITE,anyBoolean); result = false;
+        AuthorizeManager.authorizeActionBoolean((Context) any, (Community) any,
+            Constants.ADD,anyBoolean); result = false;
+        // Disallow parent Collection WRITE perms
+        AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+            Constants.WRITE,anyBoolean); result = new AuthorizeException();
+    }};

     context.turnOffAuthorisationSystem();
     Collection c = Collection.create(context);
@@ -61,11 +61,9 @@ public class MetadataFieldTest extends AbstractUnitTest
     super.init();
     try
     {
-        context.turnOffAuthorisationSystem();
         this.mf = MetadataField.findByElement(context,
                 MetadataSchema.DC_SCHEMA_ID, element, qualifier);
         this.mf.setScopeNote(scopeNote);
-        context.restoreAuthSystemState();
     }
     catch (AuthorizeException ex)
     {
@@ -189,13 +187,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test
 public void testCreateAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow full admin permissions
+        AuthorizeManager.isAdmin(context); result = true;
+    }};

     String elem = "elem1";
     String qual = "qual1";
@@ -215,13 +211,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test(expected=AuthorizeException.class)
 public void testCreateNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = false;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow full admin permissions
+        AuthorizeManager.isAdmin(context); result = false;
+    }};

     String elem = "elem1";
     String qual = "qual1";
@@ -239,13 +233,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test(expected=NonUniqueMetadataException.class)
 public void testCreateRepeated() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow full admin permissions
+        AuthorizeManager.isAdmin(context); result = true;
+    }};

     String elem = element;
     String qual = qualifier;
@@ -319,13 +311,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test
 public void testUpdateAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow full admin permissions
+        AuthorizeManager.isAdmin(context); result = true;
+    }};

     String elem = "elem2";
     String qual = "qual2";
@@ -346,13 +336,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test(expected=AuthorizeException.class)
 public void testUpdateNoAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = false;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Disallow full admin permissions
+        AuthorizeManager.isAdmin(context); result = false;
+    }};

     String elem = "elem2";
     String qual = "qual2";
@@ -370,13 +358,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test(expected=NonUniqueMetadataException.class)
 public void testUpdateRepeated() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow full admin permissions
+        AuthorizeManager.isAdmin(context); result = true;
+    }};

     String elem = element;
     String qual = qualifier;
@@ -396,13 +382,11 @@ public class MetadataFieldTest extends AbstractUnitTest
 @Test
 public void testDeleteAuth() throws Exception
 {
-    new NonStrictExpectations()
-    {
-        AuthorizeManager authManager;
-        {
-            AuthorizeManager.isAdmin(context); result = true;
-        }
-    };
+    new NonStrictExpectations(AuthorizeManager.class)
+    {{
+        // Allow full admin permissions
+        AuthorizeManager.isAdmin(context); result = true;
+    }};

     String elem = "elem3";
     String qual = "qual3";
@@ -425,13 +409,11 @@ public class MetadataFieldTest extends AbstractUnitTest
|
|||||||
@Test(expected=AuthorizeException.class)
|
@Test(expected=AuthorizeException.class)
|
||||||
public void testDeleteNoAuth() throws Exception
|
public void testDeleteNoAuth() throws Exception
|
||||||
{
|
{
|
||||||
new NonStrictExpectations()
|
new NonStrictExpectations(AuthorizeManager.class)
|
||||||
{
|
{{
|
||||||
AuthorizeManager authManager;
|
// Disallow full admin permissions
|
||||||
{
|
AuthorizeManager.isAdmin(context); result = false;
|
||||||
AuthorizeManager.isAdmin(context); result = false;
|
}};
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
String elem = "elem3";
|
String elem = "elem3";
|
||||||
String qual = "qual3";
|
String qual = "qual3";
|
||||||
@@ -462,15 +444,11 @@ public class MetadataFieldTest extends AbstractUnitTest
|
|||||||
@Test
|
@Test
|
||||||
public void testFind() throws Exception
|
public void testFind() throws Exception
|
||||||
{
|
{
|
||||||
context.turnOffAuthorisationSystem();
|
|
||||||
|
|
||||||
mf.update(context);
|
|
||||||
int id = mf.getFieldID();
|
int id = mf.getFieldID();
|
||||||
|
|
||||||
MetadataField found = MetadataField.find(context, id);
|
MetadataField found = MetadataField.find(context, id);
|
||||||
assertThat("testFind 0",found, notNullValue());
|
assertThat("testFind 0",found, notNullValue());
|
||||||
assertThat("testFind 1",found.getFieldID(), equalTo(mf.getFieldID()));
|
assertThat("testFind 1",found.getFieldID(), equalTo(mf.getFieldID()));
|
||||||
context.restoreAuthSystemState();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
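Every hunk above makes the same change: instead of declaring a mocked AuthorizeManager field inside an anonymous NonStrictExpectations subclass, the tests now pass the class to be partially mocked into the NonStrictExpectations constructor and record the stub in a double-brace initializer block, which is the form the upgraded JMockit dependency uses here. A minimal, self-contained sketch of that idiom follows; the class and test names are illustrative only and do not come from the patch.

    import mockit.NonStrictExpectations;
    import org.dspace.authorize.AuthorizeManager;
    import org.dspace.core.Context;
    import org.junit.Test;

    public class PartialMockSketchTest
    {
        private Context context; // supplied by the test fixture in the real tests

        @Test
        public void actsAsAdministrator() throws Exception
        {
            // Partially mock AuthorizeManager's static methods and stub isAdmin()
            // so the code under test believes the current user is an administrator.
            new NonStrictExpectations(AuthorizeManager.class)
            {{
                AuthorizeManager.isAdmin(context); result = true;
            }};

            // ... exercise code that calls AuthorizeManager.isAdmin(context) ...
        }
    }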
@@ -55,20 +55,6 @@ public class MetadataSchemaTest extends AbstractUnitTest
         }
     }
 
-    /**
-     * This method will be run after every test as per @After. It will
-     * clean resources initialized by the @Before methods.
-     *
-     * Other methods can be annotated with @After here or in subclasses
-     * but no execution order is guaranteed
-     */
-    @After
-    @Override
-    public void destroy()
-    {
-        super.destroy();
-    }
-
     /**
      * Test of getNamespace method, of class MetadataSchema.
      */
@@ -138,13 +124,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test
     public void testCreateAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = true;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow full admin permissions
+            AuthorizeManager.isAdmin(context); result = true;
+        }};
 
         String namespace = "namespace";
         String name = "name";
@@ -163,13 +147,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test(expected=AuthorizeException.class)
     public void testCreateNoAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = false;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Disallow full admin permissions
+            AuthorizeManager.isAdmin(context); result = false;
+        }};
 
         String namespace = "namespace";
         String name = "name";
@@ -186,13 +168,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test(expected=NonUniqueMetadataException.class)
     public void testCreateRepeated() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = true;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow full admin permissions
+            AuthorizeManager.isAdmin(context); result = true;
+        }};
 
         String namespace = ms.getNamespace();
         String name = ms.getName();
@@ -221,13 +201,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test
     public void testUpdateAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = true;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow full admin permissions
+            AuthorizeManager.isAdmin(context); result = true;
+        }};
 
         String namespace = "namespace2";
         String name = "name2";
@@ -248,13 +226,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test(expected=AuthorizeException.class)
     public void testUpdateNoAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = false;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Disallow full admin permissions
+            AuthorizeManager.isAdmin(context); result = false;
+        }};
 
         String namespace = "namespace2";
         String name = "name2";
@@ -271,13 +247,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test(expected=NonUniqueMetadataException.class)
     public void testUpdateRepeated() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = true;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow full admin permissions
+            AuthorizeManager.isAdmin(context); result = true;
+        }};
 
         String namespace = ms.getNamespace();
         String name = ms.getName();
@@ -296,13 +270,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test
     public void testDeleteAuth() throws Exception
    {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = true;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow full admin permissions
+            AuthorizeManager.isAdmin(context); result = true;
+        }};
 
         String namespace = "namespace3";
         String name = "name3";
@@ -324,13 +296,11 @@ public class MetadataSchemaTest extends AbstractUnitTest
     @Test(expected=AuthorizeException.class)
     public void testDeleteNoAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.isAdmin(context); result = false;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Disallow full admin permissions
+            AuthorizeManager.isAdmin(context); result = false;
+        }};
 
         String namespace = "namespace3";
         String name = "name3";
@@ -109,14 +109,12 @@ public class WorkspaceItemTest extends AbstractUnitTest
     @Test
     public void testCreateAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                    Constants.ADD); result = null;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow Collection ADD perms
+            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+                Constants.ADD); result = null;
+        }};
 
         Collection coll = null;
         boolean template = false;
@@ -145,14 +143,12 @@ public class WorkspaceItemTest extends AbstractUnitTest
     @Test(expected=AuthorizeException.class)
     public void testCreateNoAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.authorizeAction((Context) any, (Collection) any,
-                    Constants.ADD); result = new AuthorizeException();
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Disallow Collection ADD perms
+            AuthorizeManager.authorizeAction((Context) any, (Collection) any,
+                Constants.ADD); result = new AuthorizeException();
+        }};
 
         Collection coll = null;
         boolean template = false;
@@ -322,14 +318,12 @@ public class WorkspaceItemTest extends AbstractUnitTest
     @Test
     public void testDeleteWrapperAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                    Constants.WRITE); result = null;
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Allow Item WRITE perms
+            AuthorizeManager.authorizeAction((Context) any, (Item) any,
+                Constants.WRITE); result = null;
+        }};
 
         int itemid = wi.getItem().getID();
         int id = wi.getID();
@@ -346,14 +340,12 @@ public class WorkspaceItemTest extends AbstractUnitTest
     @Test(expected=AuthorizeException.class)
     public void testDeleteWrapperNoAuth() throws Exception
     {
-        new NonStrictExpectations()
-        {
-            AuthorizeManager authManager;
-            {
-                AuthorizeManager.authorizeAction((Context) any, (Item) any,
-                    Constants.WRITE); result = new AuthorizeException();
-            }
-        };
+        new NonStrictExpectations(AuthorizeManager.class)
+        {{
+            // Disallow Item WRITE perms
+            AuthorizeManager.authorizeAction((Context) any, (Item) any,
+                Constants.WRITE); result = new AuthorizeException();
+        }};
 
         wi.deleteWrapper();
         fail("Exception expected");
@@ -8,8 +8,6 @@
 package org.dspace.content.authority;
 
 import java.io.IOException;
-import mockit.UsingMocksAndStubs;
-import org.dspace.MockConfigurationManager;
 import org.dspace.core.PluginManager;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNotNull;
@@ -20,7 +18,6 @@ import org.junit.*;
  *
  * @author mwood
  */
-@UsingMocksAndStubs(value=MockConfigurationManager.class)
 public class DSpaceControlledVocabularyTest
 {
     public DSpaceControlledVocabularyTest()
@@ -73,14 +70,7 @@ public class DSpaceControlledVocabularyTest
     {
         System.out.println("getMatches");
 
-        // Set up the PluginManager
         final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
-        final String PLUGIN_NAME = "org.dspace.content.authority.DSpaceControlledVocabulary";
-
-        MockConfigurationManager.setProperty("dspace.dir",
-                System.getProperty("dspace.dir.static"));
-        MockConfigurationManager.setProperty(
-                "plugin.selfnamed." + PLUGIN_INTERFACE, PLUGIN_NAME);
 
         // Ensure that 'id' attribute is optional
         String field = null; // not used
@@ -89,6 +79,9 @@ public class DSpaceControlledVocabularyTest
         int start = 0;
         int limit = 0;
         String locale = null;
+        // This "farm" Controlled Vocab is included in TestEnvironment data
+        // (under /src/test/data/dspaceFolder/) and it should be auto-loaded
+        // by test configs in /src/test/data/dspace.cfg.more
         DSpaceControlledVocabulary instance = (DSpaceControlledVocabulary)
                 PluginManager.getNamedPlugin(Class.forName(PLUGIN_INTERFACE), "farm");
         assertNotNull(instance);
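The reworked test above resolves the controlled vocabulary through the PluginManager instead of wiring mock configuration by hand. In isolation, that lookup looks roughly like the sketch below; the helper class and method names are illustrative, while the "farm" vocabulary and the ChoiceAuthority interface name come from the patch itself.

    import org.dspace.content.authority.DSpaceControlledVocabulary;
    import org.dspace.core.PluginManager;

    public class NamedPluginLookupSketch
    {
        public static DSpaceControlledVocabulary loadFarmVocabulary()
                throws ClassNotFoundException
        {
            final String PLUGIN_INTERFACE = "org.dspace.content.authority.ChoiceAuthority";
            // Resolve the self-named plugin registered for the ChoiceAuthority
            // interface under the name "farm" (configured in the test dspace.cfg).
            return (DSpaceControlledVocabulary)
                    PluginManager.getNamedPlugin(Class.forName(PLUGIN_INTERFACE), "farm");
        }
    }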
@@ -21,11 +21,6 @@ import org.dspace.event.Event;
 public class MockIndexEventConsumer
         extends MockUp<IndexEventConsumer>
 {
-
-    //public void initialize() throws Exception {
-        //do nothing
-    //}
-
     @Mock
     public void consume(Context ctx, Event event) throws Exception {
         //do nothing - Solr is not running during unit testing, so we cannot index test content in Solr
@@ -35,8 +30,4 @@ public class MockIndexEventConsumer
     public void end(Context ctx) throws Exception {
         //do nothing - Solr is not running during unit testing, so we cannot index test content in Solr
     }
-
-    //public void finish(Context ctx) throws Exception {
-        //do nothing
-    //}
 }
@@ -10,15 +10,10 @@ package org.dspace.eperson;
 
 import java.sql.SQLException;
 import java.util.ArrayList;
-import mockit.UsingMocksAndStubs;
 import org.apache.commons.codec.DecoderException;
-import org.dspace.MockConfigurationManager;
+import org.dspace.AbstractUnitTest;
 import org.dspace.core.Constants;
 import org.dspace.core.Context;
-import org.dspace.servicemanager.DSpaceKernelImpl;
-import org.dspace.servicemanager.DSpaceKernelInit;
-import org.dspace.services.ConfigurationService;
-import org.dspace.storage.rdbms.MockDatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
 import org.junit.*;
 import static org.junit.Assert.*;
@@ -27,23 +22,27 @@ import static org.junit.Assert.*;
  *
  * @author mwood
  */
-@UsingMocksAndStubs(value={MockDatabaseManager.class, MockConfigurationManager.class})
-public class EPersonTest
+public class EPersonTest extends AbstractUnitTest
 {
     private static TableRow row1;
 
-    private static DSpaceKernelImpl kernel;
-
-    private static ConfigurationService config;
-
     public EPersonTest()
     {
     }
 
-    @BeforeClass
-    public static void setUpClass()
-            throws Exception
+    /**
+     * This method will be run before every test as per @Before. It will
+     * initialize resources required for the tests.
+     *
+     * Other methods can be annotated with @Before here or in subclasses
+     * but no execution order is guaranteed
+     */
+    @Before
+    @Override
+    public void init()
     {
+        super.init();
+
         // Build a TableRow for an EPerson to wrap
         final ArrayList<String> epersonColumns = new ArrayList<String>();
         epersonColumns.add("eperson_id");
@@ -51,32 +50,7 @@ public class EPersonTest
         epersonColumns.add("salt");
         epersonColumns.add("digest_algorithm");
 
         row1 = new TableRow("EPerson", epersonColumns);
-
-        // Make certain that a default DSpaceKernel is started.
-        kernel = DSpaceKernelInit.getKernel(null);
-        kernel.start();
-
-        // Configure the kernel
-        config = kernel.getConfigurationService();
-        config.setProperty("db.name", "H2");
-        config.setProperty("db.driver", "org.h2.Driver");
-    }
-
-    @AfterClass
-    public static void tearDownClass()
-            throws Exception
-    {
-    }
-
-    @Before
-    public void setUp()
-    {
-    }
-
-    @After
-    public void tearDown()
-    {
     }
 
     /**
@@ -689,10 +663,8 @@ public class EPersonTest
     public void testCheckPassword()
             throws SQLException, DecoderException
     {
-        System.out.println("checkPassword");
         final String attempt = "secret";
-        Context ctx = new Context();
-        EPerson instance = new EPerson(ctx, row1);
+        EPerson instance = new EPerson(context, row1);
 
         // Test old unsalted MD5 hash
         final String hash = "5ebe2294ecd0e0f08eab7690d2a6ee69"; // MD5("secret");
@@ -13,16 +13,13 @@ import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.Random;
+import org.apache.log4j.Logger;
 import org.dspace.AbstractUnitTest;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.*;
-import org.dspace.core.Context;
-import org.dspace.kernel.ServiceManager;
 import org.dspace.services.ConfigurationService;
 import org.dspace.storage.rdbms.DatabaseManager;
 import org.dspace.storage.rdbms.TableRow;
-import org.dspace.workflow.WorkflowItem;
-import org.dspace.workflow.WorkflowManager;
 import org.junit.*;
 import static org.junit.Assert.*;
 import static org.junit.Assume.*;
@@ -36,10 +33,12 @@ import static org.junit.Assume.*;
 public class DOIIdentifierProviderTest
     extends AbstractUnitTest
 {
+    /** log4j category */
+    private static final Logger log = Logger.getLogger(DOIIdentifierProviderTest.class);
+
     private static final String PREFIX = "10.5072";
     private static final String NAMESPACE_SEPARATOR = "dspaceUnitTests-";
 
-    private static ServiceManager sm = null;
     private static ConfigurationService config = null;
 
     private static Community community;
@@ -47,13 +46,82 @@ public class DOIIdentifierProviderTest
 
     private static MockDOIConnector connector;
     private DOIIdentifierProvider provider;
 
-    /** The most recently created test Item's ID */
-    private static int itemID;
-
     public DOIIdentifierProviderTest()
     {
     }
 
+    /**
+     * This method will be run before every test as per @Before. It will
+     * initialize resources required for the tests.
+     *
+     * Other methods can be annotated with @Before here or in subclasses
+     * but no execution order is guaranteed
+     */
+    @Before
+    @Override
+    public void init()
+    {
+        super.init();
+
+        try
+        {
+            context.turnOffAuthorisationSystem();
+            // Create an environment for our test objects to live in.
+            community = Community.create(null, context);
+            community.setMetadata("name", "A Test Community");
+            community.update();
+            collection = community.createCollection();
+            collection.setMetadata("name", "A Test Collection");
+            collection.update();
+            //we need to commit the changes so we don't block the table for testing
+            context.restoreAuthSystemState();
+            context.commit();
+
+            config = kernelImpl.getConfigurationService();
+            // Configure the service under test.
+            config.setProperty(DOIIdentifierProvider.CFG_PREFIX, PREFIX);
+            config.setProperty(DOIIdentifierProvider.CFG_NAMESPACE_SEPARATOR,
+                    NAMESPACE_SEPARATOR);
+
+            connector = new MockDOIConnector();
+
+            provider = new DOIIdentifierProvider();
+            provider.setConfigurationService(config);
+            provider.setDOIConnector(connector);
+        }
+        catch (AuthorizeException ex)
+        {
+            log.error("Authorization Error in init", ex);
+            fail("Authorization Error in init: " + ex.getMessage());
+        }
+        catch (SQLException ex)
+        {
+            log.error("SQL Error in init", ex);
+            fail("SQL Error in init: " + ex.getMessage());
+        }
+
+    }
+
+    /**
+     * This method will be run after every test as per @After. It will
+     * clean resources initialized by the @Before methods.
+     *
+     * Other methods can be annotated with @After here or in subclasses
+     * but no execution order is guaranteed
+     */
+    @After
+    @Override
+    public void destroy()
+    {
+        community = null;
+        collection = null;
+        connector.reset();
+        connector = null;
+        provider = null;
+        super.destroy();
+    }
+
+
     private static void dumpMetadata(Item eyetem)
     {
@@ -74,18 +142,14 @@ public class DOIIdentifierProviderTest
      * @throws AuthorizeException
      * @throws IOException
      */
-    private Item newItem(Context ctx)
+    private Item newItem()
             throws SQLException, AuthorizeException, IOException
     {
-        ctx.turnOffAuthorisationSystem();
-        ctx.setCurrentUser(eperson);
+        context.turnOffAuthorisationSystem();
+        //Install a fresh item
+        WorkspaceItem wsItem = WorkspaceItem.create(context, collection, false);
+        Item item = InstallItem.installItem(context, wsItem);
 
-        WorkspaceItem wsItem = WorkspaceItem.create(ctx, collection, false);
-
-        WorkflowItem wfItem = WorkflowManager.start(ctx, wsItem);
-        WorkflowManager.advance(ctx, wfItem, ctx.getCurrentUser());
-
-        Item item = wfItem.getItem();
         item.addMetadata("dc", "contributor", "author", null, "Author, A. N.");
         item.addMetadata("dc", "title", null, null, "A Test Object");
         item.addMetadata("dc", "publisher", null, null, "DSpace Test Harness");
@@ -124,8 +188,9 @@ public class DOIIdentifierProviderTest
                 remainder.toArray(new String[remainder.size()]));
 
         item.update();
-        ctx.commit();
-        ctx.restoreAuthSystemState();
+        //we need to commit the changes so we don't block the table for testing
+        context.restoreAuthSystemState();
+        context.commit();
 
         return item;
     }
@@ -148,6 +213,7 @@ public class DOIIdentifierProviderTest
     public String createDOI(Item item, Integer status, boolean metadata, String doi)
             throws SQLException, IdentifierException, AuthorizeException
     {
+        context.turnOffAuthorisationSystem();
         // we need some random data. UUIDs would be bloated here
         Random random = new Random();
         if (null == doi)
@@ -181,69 +247,12 @@ public class DOIIdentifierProviderTest
             item.update();
         }
 
+        //we need to commit the changes so we don't block the table for testing
+        context.restoreAuthSystemState();
         context.commit();
         return doi;
     }
 
-    @BeforeClass
-    public static void setUpClass()
-            throws Exception
-    {
-        // Find the usual kernel services
-        sm = kernelImpl.getServiceManager();
-        Context ctx = new Context();
-
-        ctx.turnOffAuthorisationSystem();
-        ctx.setCurrentUser(eperson);
-        // Create an environment for our test objects to live in.
-        community = Community.create(null, ctx);
-        community.setMetadata("name", "A Test Community");
-        community.update();
-        collection = community.createCollection();
-        collection.setMetadata("name", "A Test Collection");
-        collection.update();
-        ctx.complete();
-
-        config = kernelImpl.getConfigurationService();
-        // Configure the service under test.
-        config.setProperty(DOIIdentifierProvider.CFG_PREFIX, PREFIX);
-        config.setProperty(DOIIdentifierProvider.CFG_NAMESPACE_SEPARATOR,
-                NAMESPACE_SEPARATOR);
-        // Don't try to send mail.
-        config.setProperty("mail.server.disabled", "true");
-
-        connector = new MockDOIConnector();
-    }
-
-    @AfterClass
-    public static void tearDownClass()
-            throws Exception
-    {
-        /*
-        System.out.print("Tearing down\n\n");
-        Context ctx = new Context();
-        dumpMetadata(Item.find(ctx, itemID));
-        */
-    }
-
-    @Before
-    public void setUp()
-    {
-        context.setCurrentUser(eperson);
-        context.turnOffAuthorisationSystem();
-
-        provider = new DOIIdentifierProvider();
-        provider.setConfigurationService(config);
-        provider.setDOIConnector(connector);
-    }
-
-    @After
-    public void tearDown()
-    {
-        context.restoreAuthSystemState();
-        connector.reset();
-    }
-
     /**
      * Test of supports method, of class DataCiteIdentifierProvider.
      */
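A pattern repeats throughout the reworked fixtures above: test objects are created while the authorization system is switched off, and the work is committed so the next test does not find the tables blocked. Stripped of test detail, that shape is roughly the sketch below (class and method names are illustrative; the real helpers in the patch also add metadata and do not use a try/finally block).

    import org.dspace.content.Collection;
    import org.dspace.content.InstallItem;
    import org.dspace.content.Item;
    import org.dspace.content.WorkspaceItem;
    import org.dspace.core.Context;

    public class TestItemFactorySketch
    {
        /** Create and archive a throwaway Item while authorization is switched off. */
        public static Item newTestItem(Context context, Collection collection) throws Exception
        {
            context.turnOffAuthorisationSystem();
            try
            {
                WorkspaceItem wsItem = WorkspaceItem.create(context, collection, false);
                return InstallItem.installItem(context, wsItem);
            }
            finally
            {
                // Always restore the authorization state and commit, so later
                // tests are neither left elevated nor blocked on open rows.
                context.restoreAuthSystemState();
                context.commit();
            }
        }
    }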
@@ -294,10 +303,12 @@ public class DOIIdentifierProviderTest
     public void testStore_DOI_as_item_metadata()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = DOI.SCHEME + PREFIX + "/" + NAMESPACE_SEPARATOR
                 + Long.toHexString(new Date().getTime());
+        context.turnOffAuthorisationSystem();
         provider.saveDOIToObject(context, item, doi);
+        context.restoreAuthSystemState();
 
         DCValue[] metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
                 DOIIdentifierProvider.DOI_ELEMENT,
@@ -318,17 +329,18 @@ public class DOIIdentifierProviderTest
     public void testGet_DOI_out_of_item_metadata()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = DOI.SCHEME + PREFIX + "/" + NAMESPACE_SEPARATOR
                 + Long.toHexString(new Date().getTime());
 
+        context.turnOffAuthorisationSystem();
         item.addMetadata(DOIIdentifierProvider.MD_SCHEMA,
                 DOIIdentifierProvider.DOI_ELEMENT,
                 DOIIdentifierProvider.DOI_QUALIFIER,
                 null,
                 DOI.DOIToExternalForm(doi));
         item.update();
-        context.commit();
+        context.restoreAuthSystemState();
 
         assertTrue("Failed to recognize DOI in item metadata.",
                 doi.equals(DOIIdentifierProvider.getDOIOutOfObject(item)));
@@ -338,19 +350,20 @@ public class DOIIdentifierProviderTest
     public void testRemove_DOI_from_item_metadata()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = DOI.SCHEME + PREFIX + "/" + NAMESPACE_SEPARATOR
                 + Long.toHexString(new Date().getTime());
 
+        context.turnOffAuthorisationSystem();
         item.addMetadata(DOIIdentifierProvider.MD_SCHEMA,
                 DOIIdentifierProvider.DOI_ELEMENT,
                 DOIIdentifierProvider.DOI_QUALIFIER,
                 null,
                 DOI.DOIToExternalForm(doi));
         item.update();
-        context.commit();
 
         provider.removeDOIFromObject(context, item, doi);
+        context.restoreAuthSystemState();
 
         DCValue[] metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
                 DOIIdentifierProvider.DOI_ELEMENT,
@@ -372,7 +385,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, AuthorizeException, IOException,
                 IllegalArgumentException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, false);
 
         String retrievedDOI = DOIIdentifierProvider.getDOIByObject(context, item);
@@ -386,7 +399,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, AuthorizeException, IOException,
                 IllegalArgumentException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, false);
 
         String retrievedDOI = provider.lookup(context, (DSpaceObject) item);
@@ -400,7 +413,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, AuthorizeException, IOException,
                 IllegalArgumentException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, false);
 
         DSpaceObject dso = DOIIdentifierProvider.getObjectByDOI(context, doi);
@@ -417,7 +430,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, AuthorizeException, IOException,
                 IllegalArgumentException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, false);
 
         DSpaceObject dso = provider.resolve(context, doi);
@@ -438,12 +451,14 @@ public class DOIIdentifierProviderTest
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
         // add two DOIs.
-        Item item = newItem(context);
+        Item item = newItem();
         String doi1 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
         String doi2 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
 
         // remove one of it
+        context.turnOffAuthorisationSystem();
         provider.removeDOIFromObject(context, item, doi1);
+        context.restoreAuthSystemState();
 
         // assure that the right one was removed
         DCValue[] metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
@@ -468,7 +483,9 @@ public class DOIIdentifierProviderTest
         assertTrue("Removed wrong DOI from item metadata.", foundDOI2);
 
         // remove the otherone as well.
+        context.turnOffAuthorisationSystem();
         provider.removeDOIFromObject(context, item, doi2);
+        context.restoreAuthSystemState();
 
         // check it
         metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
@@ -496,7 +513,7 @@ public class DOIIdentifierProviderTest
     @Test
     public void testMintDOI() throws SQLException, AuthorizeException, IOException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = null;
         try
         {
@@ -527,7 +544,7 @@ public class DOIIdentifierProviderTest
     public void testMint_returns_existing_DOI()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, null, true);
 
         String retrievedDOI = provider.mint(context, item);
@@ -541,7 +558,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, SQLException, AuthorizeException, IOException,
                 IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, null, true);
 
         provider.reserve(context, item, doi);
@@ -559,7 +576,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, SQLException, AuthorizeException, IOException,
                 IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, null, true);
 
         provider.register(context, item, doi);
@@ -577,7 +594,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, SQLException, AuthorizeException, IOException,
                 IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi = this.createDOI(item, DOIIdentifierProvider.IS_RESERVED, true);
 
         provider.register(context, item, doi);
@@ -595,7 +612,7 @@ public class DOIIdentifierProviderTest
             throws SQLException, SQLException, AuthorizeException, IOException,
                 IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
 
         String doi = provider.register(context, item);
 
@@ -616,12 +633,14 @@ public class DOIIdentifierProviderTest
     public void testDelete_specified_DOI()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi1 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
         String doi2 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
 
         // remove one of it
+        context.turnOffAuthorisationSystem();
         provider.delete(context, item, doi1);
+        context.restoreAuthSystemState();
 
         // assure that the right one was removed
         DCValue[] metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
@@ -661,12 +680,14 @@ public class DOIIdentifierProviderTest
     public void testDelete_all_DOIs()
             throws SQLException, AuthorizeException, IOException, IdentifierException
     {
-        Item item = newItem(context);
+        Item item = newItem();
         String doi1 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
         String doi2 = this.createDOI(item, DOIIdentifierProvider.IS_REGISTERED, true);
 
         // remove one of it
+        context.turnOffAuthorisationSystem();
         provider.delete(context, item);
+        context.restoreAuthSystemState();
 
         // assure that the right one was removed
         DCValue[] metadata = item.getMetadata(DOIIdentifierProvider.MD_SCHEMA,
@@ -70,13 +70,11 @@
             throws SQLException, AuthorizeException, IOException
     {
         ctx.turnOffAuthorisationSystem();
-        ctx.setCurrentUser(eperson);
-        // Create an Item to play with
-        WorkspaceItem wsItem = WorkspaceItem.create(ctx, collection, false);
+        //Install a fresh item
+        WorkspaceItem wsItem = WorkspaceItem.create(context, collection, false);
+        Item item = InstallItem.installItem(context, wsItem);
 
-        // Get it from the workspace and set some metadata
-        Item item = wsItem.getItem();
         itemID = item.getID();
 
         item.addMetadata("dc", "contributor", "author", null, "Author, A. N.");
@@ -84,12 +82,6 @@ public class EZIDIdentifierProviderTest
         item.addMetadata("dc", "publisher", null, null, "DSpace Test Harness");
         item.update();
 
-        // I think we have to do this?
-        WorkflowItem wfItem = WorkflowManager.startWithoutNotify(ctx, wsItem);
-        WorkflowManager.advance(ctx, wfItem, ctx.getCurrentUser());
-        wfItem.update();
-        wfItem.deleteWrapper();
-
         // Commit work, clean up
         ctx.commit();
         ctx.restoreAuthSystemState();
@@ -23,12 +23,6 @@ public class SpiderDetectorTest
 {
     private static final String NOT_A_BOT_ADDRESS = "192.168.0.1";
 
-    @BeforeClass
-    static public void beforeClass()
-    {
-        new MockSolrLogger();
-    }
-
     /**
      * Test method for {@link org.dspace.statistics.util.SpiderDetector#readPatterns(java.io.File)}.
      */
File diff suppressed because it is too large
@@ -52,7 +52,7 @@
          <artifactId>mail</artifactId>
       </dependency>
       <dependency> <!-- Keep jmockit before junit -->
-         <groupId>com.googlecode.jmockit</groupId>
+         <groupId>org.jmockit</groupId>
          <artifactId>jmockit</artifactId>
          <scope>test</scope>
       </dependency>
@@ -1,3 +1,8 @@
-DSpace does NOT support the use of h2 in production. This directory is provided
-solely for testing during development. The test schema is kept here to
-facilitate its maintenance as the production schemata evolve.
+DSpace does NOT support the use of the H2 Database (http://www.h2database.com/)
+in Production. Instead, DSpace uses the H2 Database to perform Unit Testing
+during development.
+
+By default, the DSpace Unit Testing environment configures H2 to run in "Oracle
+Mode" and initializes the H2 database using the Oracle schema (at
+../oracle/database_schema.sql). In the future, if we find the Oracle schema is
+unable to work for H2, we may need to (re)create an H2-specific database_schema.sql.
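For context, H2 selects its Oracle compatibility mode through the JDBC URL. The snippet below only illustrates that mechanism; the database name, credentials, and URL options shown are placeholders, not the connection settings actually used by the DSpace test environment (those live in the test dspace.cfg).

    import java.sql.Connection;
    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class H2OracleModeSketch
    {
        public static void main(String[] args) throws SQLException
        {
            // An in-memory H2 database running in Oracle compatibility mode.
            String url = "jdbc:h2:mem:dspace-test;MODE=Oracle;DB_CLOSE_DELAY=-1";
            try (Connection conn = DriverManager.getConnection(url, "sa", ""))
            {
                System.out.println("Connected to " + conn.getMetaData().getDatabaseProductName());
            }
        }
    }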
@@ -1,836 +0,0 @@
|
|||||||
--
|
|
||||||
-- The contents of this file are subject to the license and copyright
|
|
||||||
-- detailed in the LICENSE and NOTICE files at the root of the source
|
|
||||||
-- tree and available online at
|
|
||||||
--
|
|
||||||
-- http://www.dspace.org/license/
|
|
||||||
--
|
|
||||||
--
|
|
||||||
-- database_schema.sql
|
|
||||||
--
|
|
||||||
-- Version: $Revision: 4718 $
|
|
||||||
--
|
|
||||||
-- Date: $Date: 2010-01-21 20:28:22 +0000 (Thu, 21 Jan 2010) $
|
|
||||||
--
|
|
||||||
-- Copyright (c) 2002-2009, The DSpace Foundation. All rights reserved.
|
|
||||||
--
|
|
||||||
-- Redistribution and use in source and binary forms, with or without
|
|
||||||
-- modification, are permitted provided that the following conditions are
|
|
||||||
-- met:
|
|
||||||
--
|
|
||||||
-- - Redistributions of source code must retain the above copyright
|
|
||||||
-- notice, this list of conditions and the following disclaimer.
|
|
||||||
--
|
|
||||||
-- - Redistributions in binary form must reproduce the above copyright
|
|
||||||
-- notice, this list of conditions and the following disclaimer in the
|
|
||||||
-- documentation and/or other materials provided with the distribution.
|
|
||||||
--
|
|
||||||
-- - Neither the name of the DSpace Foundation nor the names of its
|
|
||||||
-- contributors may be used to endorse or promote products derived from
|
|
||||||
-- this software without specific prior written permission.
|
|
||||||
--
|
|
||||||
-- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
-- ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
-- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
-- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
-- HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
|
||||||
-- INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
|
||||||
-- BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
|
||||||
-- OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
|
||||||
-- ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
|
||||||
-- TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
|
||||||
-- USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
|
||||||
-- DAMAGE.
|
|
||||||
--
|
|
||||||
--
|
|
||||||
--
|
|
||||||
--
|
|
||||||
-- DSpace SQL schema
|
|
||||||
--
|
|
||||||
-- Authors: Peter Breton, Robert Tansley, David Stuve, Daniel Chudnov,
|
|
||||||
-- Richard Jones
|
|
||||||
--
|
|
||||||
-- This file is used as-is to initialize a database. Therefore,
|
|
||||||
-- table and view definitions must be ordered correctly.
|
|
||||||
--
|
|
||||||
-- Caution: THIS IS POSTGRESQL-SPECIFIC:
|
|
||||||
--
|
|
||||||
-- * SEQUENCES are used for automatic ID generation
|
|
||||||
-- * FUNCTION getnextid used for automatic ID generation
|
|
||||||
--
|
|
||||||
--
|
|
||||||
-- To convert to work with another database, you need to ensure
|
|
||||||
-- an SQL function 'getnextid', which takes a table name as an
|
|
||||||
-- argument, will return a safe new ID to use to create a new
|
|
||||||
-- row in that table.
|
|
||||||
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- Function for obtaining new IDs.
|
|
||||||
--
|
|
||||||
-- * The argument is a table name
|
|
||||||
-- * It returns a new ID safe to use for that table
|
|
||||||
--
|
|
||||||
-- The function reads the next value from the sequence
|
|
||||||
-- 'tablename_seq'
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- Commented as it is not compatible with H2
|
|
||||||
-- CREATE FUNCTION getnextid(VARCHAR(40)) RETURNS INTEGER AS
|
|
||||||
-- 'SELECT CAST (nextval($1 || ''_seq'') AS INTEGER) AS RESULT;' LANGUAGE SQL;
|
|
||||||
|
|
||||||
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- Sequences for creating new IDs (primary keys) for
|
|
||||||
-- tables. Each table must have a corresponding
|
|
||||||
-- sequence called 'tablename_seq'.
|
|
||||||
-------------------------------------------------------
|
|
||||||
CREATE SEQUENCE bitstreamformatregistry_seq;
|
|
||||||
CREATE SEQUENCE fileextension_seq;
|
|
||||||
CREATE SEQUENCE bitstream_seq;
|
|
||||||
CREATE SEQUENCE eperson_seq;
|
|
||||||
CREATE SEQUENCE epersongroup_seq;
|
|
||||||
CREATE SEQUENCE item_seq;
|
|
||||||
CREATE SEQUENCE bundle_seq;
|
|
||||||
CREATE SEQUENCE item2bundle_seq;
|
|
||||||
CREATE SEQUENCE bundle2bitstream_seq;
|
|
||||||
CREATE SEQUENCE dctyperegistry_seq;
|
|
||||||
CREATE SEQUENCE dcvalue_seq;
|
|
||||||
CREATE SEQUENCE community_seq;
|
|
||||||
CREATE SEQUENCE collection_seq;
|
|
||||||
CREATE SEQUENCE community2community_seq;
|
|
||||||
CREATE SEQUENCE community2collection_seq;
|
|
||||||
CREATE SEQUENCE collection2item_seq;
|
|
||||||
CREATE SEQUENCE resourcepolicy_seq;
|
|
||||||
CREATE SEQUENCE epersongroup2eperson_seq;
|
|
||||||
CREATE SEQUENCE handle_seq;
|
|
||||||
CREATE SEQUENCE doi_seq;
|
|
||||||
CREATE SEQUENCE workspaceitem_seq;
|
|
||||||
CREATE SEQUENCE workflowitem_seq;
|
|
||||||
CREATE SEQUENCE tasklistitem_seq;
|
|
||||||
CREATE SEQUENCE registrationdata_seq;
|
|
||||||
CREATE SEQUENCE subscription_seq;
|
|
||||||
CREATE SEQUENCE communities2item_seq;
|
|
||||||
CREATE SEQUENCE epersongroup2workspaceitem_seq;
|
|
||||||
CREATE SEQUENCE metadataschemaregistry_seq;
|
|
||||||
CREATE SEQUENCE metadatafieldregistry_seq;
|
|
||||||
CREATE SEQUENCE metadatavalue_seq;
|
|
||||||
CREATE SEQUENCE group2group_seq;
|
|
||||||
CREATE SEQUENCE group2groupcache_seq;
|
|
||||||
CREATE SEQUENCE harvested_collection_seq;
|
|
||||||
CREATE SEQUENCE harvested_item_seq;
|
|
||||||
CREATE SEQUENCE webapp_seq;
|
|
||||||
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- BitstreamFormatRegistry table
|
|
||||||
-------------------------------------------------------
|
|
||||||
CREATE TABLE BitstreamFormatRegistry
|
|
||||||
(
|
|
||||||
bitstream_format_id INTEGER PRIMARY KEY,
|
|
||||||
mimetype VARCHAR(256),
|
|
||||||
short_description VARCHAR(128),
|
|
||||||
description TEXT,
|
|
||||||
support_level INTEGER,
|
|
||||||
-- Identifies internal types
|
|
||||||
internal BOOL
|
|
||||||
);
|
|
||||||
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- FileExtension table
|
|
||||||
-------------------------------------------------------
|
|
||||||
CREATE TABLE FileExtension
|
|
||||||
(
|
|
||||||
file_extension_id INTEGER PRIMARY KEY,
|
|
||||||
bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id),
|
|
||||||
extension VARCHAR(16)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id);
|
|
||||||
|
|
||||||
-------------------------------------------------------
|
|
||||||
-- Bitstream table
|
|
||||||
-------------------------------------------------------
|
|
||||||
CREATE TABLE Bitstream
|
|
||||||
(
|
|
||||||
bitstream_id INTEGER PRIMARY KEY,
|
|
||||||
bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id),
|
|
||||||
name VARCHAR(256),
|
|
||||||
size_bytes BIGINT,
|
|
||||||
checksum VARCHAR(64),
|
|
||||||
checksum_algorithm VARCHAR(32),
|
|
||||||
description TEXT,
|
|
||||||
user_format_description TEXT,
|
|
||||||
source VARCHAR(256),
|
|
||||||
internal_id VARCHAR(256),
|
|
||||||
deleted BOOL,
|
|
||||||
store_number INTEGER,
|
|
||||||
sequence_id INTEGER
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id);
|
|
||||||
|
|
||||||
-------------------------------------------------------
-- EPerson table
-------------------------------------------------------
CREATE TABLE EPerson
(
  eperson_id INTEGER PRIMARY KEY,
  email VARCHAR(64),
  password VARCHAR(128),
  salt VARCHAR(32),
  digest_algorithm VARCHAR(16),
  firstname VARCHAR(64),
  lastname VARCHAR(64),
  can_log_in BOOL,
  require_certificate BOOL,
  self_registered BOOL,
  last_active TIMESTAMP,
  sub_frequency INTEGER,
  phone VARCHAR(32),
  netid VARCHAR(64),
  language VARCHAR(64)
);

-- index by email
CREATE INDEX eperson_email_idx ON EPerson(email);

-- index by netid
CREATE INDEX eperson_netid_idx ON EPerson(netid);

-------------------------------------------------------
-- EPersonGroup table
-------------------------------------------------------
CREATE TABLE EPersonGroup
(
  eperson_group_id INTEGER PRIMARY KEY,
  name VARCHAR(256)
);

------------------------------------------------------
-- Group2Group table, records group membership in other groups
------------------------------------------------------
CREATE TABLE Group2Group
(
  id INTEGER PRIMARY KEY,
  parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id),
  child_id INTEGER REFERENCES EPersonGroup(eperson_group_id)
);

CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id);
CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id);

------------------------------------------------------
-- Group2GroupCache table: the 'unwound' hierarchy of
-- Group2Group. It explicitly names every parent-child
-- relationship, even across nested groups. For example,
-- if Group2Group lists B as a child of A and C as a child of B,
-- this table will have entries for parent(A,B), parent(B,C)
-- AND parent(A,C), so that all of the child groups of A can be
-- looked up in a single simple query.
------------------------------------------------------
CREATE TABLE Group2GroupCache
(
  id INTEGER PRIMARY KEY,
  parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id),
  child_id INTEGER REFERENCES EPersonGroup(eperson_group_id)
);

CREATE INDEX g2gc_parent_fk_idx ON Group2GroupCache(parent_id);
CREATE INDEX g2gc_child_fk_idx ON Group2GroupCache(child_id);

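-- Illustrative example (not part of the original schema; group id 1 is
-- hypothetical): because the hierarchy is pre-flattened, every direct or
-- transitive child group can be found without recursion:
--
--   SELECT child_id
--   FROM Group2GroupCache
--   WHERE parent_id = 1;
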
-------------------------------------------------------
-- Item table
-------------------------------------------------------
CREATE TABLE Item
(
  item_id INTEGER PRIMARY KEY,
  submitter_id INTEGER REFERENCES EPerson(eperson_id),
  in_archive BOOL,
  withdrawn BOOL,
  discoverable BOOL,
  last_modified TIMESTAMP,
  owning_collection INTEGER
);

CREATE INDEX item_submitter_fk_idx ON Item(submitter_id);

-------------------------------------------------------
-- Bundle table
-------------------------------------------------------
CREATE TABLE Bundle
(
  bundle_id INTEGER PRIMARY KEY,
  name VARCHAR(16),
  primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id)
);

CREATE INDEX bundle_primary_fk_idx ON Bundle(primary_bitstream_id);

-------------------------------------------------------
-- Item2Bundle table
-------------------------------------------------------
CREATE TABLE Item2Bundle
(
  id INTEGER PRIMARY KEY,
  item_id INTEGER REFERENCES Item(item_id),
  bundle_id INTEGER REFERENCES Bundle(bundle_id)
);

-- index by item_id
CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id);

CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id);

-------------------------------------------------------
-- Bundle2Bitstream table
-------------------------------------------------------
CREATE TABLE Bundle2Bitstream
(
  id INTEGER PRIMARY KEY,
  bundle_id INTEGER REFERENCES Bundle(bundle_id),
  bitstream_id INTEGER REFERENCES Bitstream(bitstream_id),
  bitstream_order INTEGER
);

-- index by bundle_id
CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id);

CREATE INDEX bundle2bitstream_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id);

-------------------------------------------------------
-- Metadata Tables and Sequences
-------------------------------------------------------
CREATE TABLE MetadataSchemaRegistry
(
  metadata_schema_id INTEGER PRIMARY KEY,
  namespace VARCHAR(256),
  short_id VARCHAR(32)
);

CREATE TABLE MetadataFieldRegistry
(
  metadata_field_id INTEGER PRIMARY KEY,
  metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id),
  element VARCHAR(64),
  qualifier VARCHAR(64),
  scope_note TEXT
);

CREATE TABLE MetadataValue
(
  metadata_value_id INTEGER PRIMARY KEY,
  item_id INTEGER REFERENCES Item(item_id),
  metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id),
  text_value TEXT,
  text_lang VARCHAR(24),
  place INTEGER,
  authority VARCHAR(100),
  confidence INTEGER DEFAULT -1
);

-- Create a dcvalue view for backwards compatibility
CREATE VIEW dcvalue AS
  SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id,
         MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value,
         MetadataValue.text_lang, MetadataValue.place
  FROM MetadataValue, MetadataFieldRegistry
  WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id
    AND MetadataFieldRegistry.metadata_schema_id = 1;

-- An index for item_id - almost all access is based on
-- instantiating the item object, which grabs all values
-- related to that item
CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id);
CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id);
CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id);
CREATE INDEX metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id);

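-- Illustrative example (not part of the original schema; item id 123 is
-- hypothetical): fetching every metadata value of one item, the access
-- pattern the indexes above are designed for:
--
--   SELECT mfr.element, mfr.qualifier, mv.text_value, mv.text_lang, mv.place
--   FROM MetadataValue mv
--   JOIN MetadataFieldRegistry mfr
--     ON mv.metadata_field_id = mfr.metadata_field_id
--   WHERE mv.item_id = 123
--   ORDER BY mfr.element, mfr.qualifier, mv.place;
--
-- Legacy code can run the equivalent query against the dcvalue view, which
-- exposes only the Dublin Core schema (metadata_schema_id = 1).
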
-------------------------------------------------------
-- Community table
-------------------------------------------------------
CREATE TABLE Community
(
  community_id INTEGER PRIMARY KEY,
  name VARCHAR(128),
  short_description VARCHAR(512),
  introductory_text TEXT,
  logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id),
  copyright_text TEXT,
  side_bar_text TEXT,
  admin INTEGER REFERENCES EPersonGroup( eperson_group_id )
);

CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id);
CREATE INDEX community_admin_fk_idx ON Community(admin);

-------------------------------------------------------
-- Collection table
-------------------------------------------------------
CREATE TABLE Collection
(
  collection_id INTEGER PRIMARY KEY,
  name VARCHAR(128),
  short_description VARCHAR(512),
  introductory_text TEXT,
  logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id),
  template_item_id INTEGER REFERENCES Item(item_id),
  provenance_description TEXT,
  license TEXT,
  copyright_text TEXT,
  side_bar_text TEXT,
  workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ),
  workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ),
  workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ),
  submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ),
  admin INTEGER REFERENCES EPersonGroup( eperson_group_id )
);

CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id);
CREATE INDEX collection_template_fk_idx ON Collection(template_item_id);
CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1);
CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2);
CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3);
CREATE INDEX collection_submitter_fk_idx ON Collection(submitter);
CREATE INDEX collection_admin_fk_idx ON Collection(admin);

-------------------------------------------------------
-- Community2Community table
-------------------------------------------------------
CREATE TABLE Community2Community
(
  id INTEGER PRIMARY KEY,
  parent_comm_id INTEGER REFERENCES Community(community_id),
  child_comm_id INTEGER,
  CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES Community(community_id) DEFERRABLE
);

CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id);
CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id);

-------------------------------------------------------
-- Community2Collection table
-------------------------------------------------------
CREATE TABLE Community2Collection
(
  id INTEGER PRIMARY KEY,
  community_id INTEGER REFERENCES Community(community_id),
  collection_id INTEGER,
  CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES Collection(collection_id) DEFERRABLE
);

-- Index on community ID
CREATE INDEX Community2Collection_community_id_idx ON Community2Collection(community_id);
-- Index on collection ID
CREATE INDEX Community2Collection_collection_id_idx ON Community2Collection(collection_id);

-------------------------------------------------------
-- Collection2Item table
-------------------------------------------------------
CREATE TABLE Collection2Item
(
  id INTEGER PRIMARY KEY,
  collection_id INTEGER REFERENCES Collection(collection_id),
  item_id INTEGER,
  CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES Item(item_id) DEFERRABLE
);

-- index by collection_id
CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id);
-- and item_id
CREATE INDEX Collection2Item_item_id_idx ON Collection2Item( item_id );

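-- Illustrative example (not part of the original schema; the ids are
-- hypothetical): the two indexes cover both directions of the mapping,
-- e.g. listing the items of a collection or the collections of an item:
--
--   SELECT item_id FROM Collection2Item WHERE collection_id = 7;
--   SELECT collection_id FROM Collection2Item WHERE item_id = 42;
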
-------------------------------------------------------
-- ResourcePolicy table
-------------------------------------------------------
CREATE TABLE ResourcePolicy
(
  policy_id INTEGER PRIMARY KEY,
  resource_type_id INTEGER,
  resource_id INTEGER,
  action_id INTEGER,
  eperson_id INTEGER REFERENCES EPerson(eperson_id),
  epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id),
  start_date DATE,
  end_date DATE,
  rpname VARCHAR(30),
  rptype VARCHAR(30),
  rpdescription VARCHAR(100)
);

-- index by resource_type,resource_id - all queries by
-- authorization manager are select type=x, id=y, action=z
CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id);

CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id);
CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id);

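-- Illustrative example (not part of the original schema; the constant
-- values are hypothetical placeholders): an authorization check of the
-- "type=x, id=y, action=z" form described above looks roughly like:
--
--   SELECT eperson_id, epersongroup_id, start_date, end_date
--   FROM ResourcePolicy
--   WHERE resource_type_id = 2
--     AND resource_id = 42
--     AND action_id = 0;
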
-------------------------------------------------------
-- EPersonGroup2EPerson table
-------------------------------------------------------
CREATE TABLE EPersonGroup2EPerson
(
  id INTEGER PRIMARY KEY,
  eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id),
  eperson_id INTEGER REFERENCES EPerson(eperson_id)
);

-- Index by group ID (used heavily by AuthorizeManager)
CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id);

CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id);

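-- Illustrative example (not part of the original schema; group id 1 is
-- hypothetical): resolving the members of a group, the lookup the group
-- index above is meant to serve:
--
--   SELECT e.eperson_id, e.email
--   FROM EPersonGroup2EPerson g2e
--   JOIN EPerson e ON e.eperson_id = g2e.eperson_id
--   WHERE g2e.eperson_group_id = 1;
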
-------------------------------------------------------
-- Handle table
-------------------------------------------------------
CREATE TABLE Handle
(
  handle_id INTEGER PRIMARY KEY,
  handle VARCHAR(256),
  resource_type_id INTEGER,
  resource_id INTEGER
);

-- index by handle, commonly looked up
CREATE INDEX handle_handle_idx ON Handle(handle);
-- index by resource id and resource type id
CREATE INDEX handle_resource_id_and_type_idx ON handle(resource_id, resource_type_id);

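-- Illustrative example (not part of the original schema; the handle value
-- is hypothetical): resolving a persistent identifier to the object it
-- points at uses handle_handle_idx:
--
--   SELECT resource_type_id, resource_id
--   FROM Handle
--   WHERE handle = '123456789/42';
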
-------------------------------------------------------
-- Doi table
-------------------------------------------------------
CREATE TABLE Doi
(
  doi_id INTEGER PRIMARY KEY,
  doi VARCHAR(256),
  resource_type_id INTEGER,
  resource_id INTEGER,
  status INTEGER
);

-- index by doi, commonly looked up
CREATE INDEX doi_doi_idx ON Doi(doi);
-- index by resource id and resource type id
CREATE INDEX doi_resource_id_and_type_idx ON Doi(resource_id, resource_type_id);

-------------------------------------------------------
-- WorkspaceItem table
-------------------------------------------------------
CREATE TABLE WorkspaceItem
(
  workspace_item_id INTEGER PRIMARY KEY,
  item_id INTEGER REFERENCES Item(item_id),
  collection_id INTEGER REFERENCES Collection(collection_id),
  -- Answers to questions on first page of submit UI
  multiple_titles BOOL,
  published_before BOOL,
  multiple_files BOOL,
  -- How far the user has got in the submit process
  stage_reached INTEGER,
  page_reached INTEGER
);

CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id);
CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id);

-------------------------------------------------------
-- WorkflowItem table
-------------------------------------------------------
CREATE TABLE WorkflowItem
(
  workflow_id INTEGER PRIMARY KEY,
  item_id INTEGER REFERENCES Item(item_id),
  collection_id INTEGER REFERENCES Collection(collection_id),
  state INTEGER,
  owner INTEGER REFERENCES EPerson(eperson_id),

  -- Answers to questions on first page of submit UI
  multiple_titles BOOL,
  published_before BOOL,
  multiple_files BOOL
  -- Note: stage reached not applicable here - people involved in workflow
  -- can always jump around submission UI

);

CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id);
CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id);
CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner);

-------------------------------------------------------
-- TasklistItem table
-------------------------------------------------------
CREATE TABLE TasklistItem
(
  tasklist_id INTEGER PRIMARY KEY,
  eperson_id INTEGER REFERENCES EPerson(eperson_id),
  workflow_id INTEGER REFERENCES WorkflowItem(workflow_id)
);

CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id);
CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id);

-------------------------------------------------------
-- RegistrationData table
-------------------------------------------------------
CREATE TABLE RegistrationData
(
  registrationdata_id INTEGER PRIMARY KEY,
  email VARCHAR(64),
  token VARCHAR(48),
  expires TIMESTAMP
);


-------------------------------------------------------
-- Subscription table
-------------------------------------------------------
CREATE TABLE Subscription
(
  subscription_id INTEGER PRIMARY KEY,
  eperson_id INTEGER REFERENCES EPerson(eperson_id),
  collection_id INTEGER REFERENCES Collection(collection_id)
);

CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id);
CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id);


-------------------------------------------------------------------------------
-- EPersonGroup2WorkspaceItem table
-------------------------------------------------------------------------------

CREATE TABLE epersongroup2workspaceitem
(
  id integer,
  eperson_group_id integer REFERENCES EPersonGroup(eperson_group_id),
  workspace_item_id integer REFERENCES WorkspaceItem(workspace_item_id),
  CONSTRAINT epersongroup2item_pkey PRIMARY KEY (id)
);

CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id);
CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id);

-------------------------------------------------------
-- Communities2Item table
-------------------------------------------------------
CREATE TABLE Communities2Item
(
  id INTEGER PRIMARY KEY,
  community_id INTEGER REFERENCES Community(community_id),
  item_id INTEGER REFERENCES Item(item_id)
);

-- Index by item_id for update/re-index
CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id );

CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id );

-------------------------------------------------------
-- Community2Item view
------------------------------------------------------
CREATE VIEW Community2Item as
SELECT Community2Collection.community_id, Collection2Item.item_id
FROM Community2Collection, Collection2Item
WHERE Collection2Item.collection_id = Community2Collection.collection_id
;

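-- Illustrative example (not part of the original schema; item id 42 is
-- hypothetical): the view saves callers from writing the join themselves
-- when asking which communities contain a given item:
--
--   SELECT community_id FROM Community2Item WHERE item_id = 42;
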
-------------------------------------------------------------------------
-- Tables to manage cache of item counts for communities and collections
-------------------------------------------------------------------------

CREATE TABLE collection_item_count (
  collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id),
  count INTEGER
);

CREATE TABLE community_item_count (
  community_id INTEGER PRIMARY KEY REFERENCES community(community_id),
  count INTEGER
);

-------------------------------------------------------
-- Create 'special' groups, for anonymous access
-- and administrators
-------------------------------------------------------
-- We don't use getnextid() for 'anonymous' since the sequences start at '1'
INSERT INTO epersongroup VALUES(0, 'Anonymous');
INSERT INTO epersongroup VALUES(NEXTVAL('epersongroup_seq'), 'Administrator');

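-- Illustrative example (not part of the original schema; the group name is
-- hypothetical): any further group would be created the same way, letting
-- the sequence assign its id:
--
--   INSERT INTO epersongroup VALUES(NEXTVAL('epersongroup_seq'), 'Reviewers');
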
-------------------------------------------------------
-- Create the checksum checker tables
-------------------------------------------------------
-- list of the possible results as determined
-- by the system or an administrator

CREATE TABLE checksum_results
(
  result_code VARCHAR PRIMARY KEY,
  result_description VARCHAR
);


-- This table has a one-to-one relationship
-- with the bitstream table. A row will be inserted
-- every time a row is inserted into the bitstream table, and
-- that row will be updated every time the checksum is
-- re-calculated.

CREATE TABLE most_recent_checksum
(
  bitstream_id INTEGER PRIMARY KEY REFERENCES bitstream(bitstream_id),
  to_be_processed BOOLEAN NOT NULL,
  expected_checksum VARCHAR NOT NULL,
  current_checksum VARCHAR NOT NULL,
  last_process_start_date TIMESTAMP NOT NULL,
  last_process_end_date TIMESTAMP NOT NULL,
  checksum_algorithm VARCHAR NOT NULL,
  matched_prev_checksum BOOLEAN NOT NULL,
  result VARCHAR REFERENCES checksum_results(result_code)
);

CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result );

-- A row will be inserted into this table every
-- time a checksum is re-calculated.

CREATE TABLE checksum_history
(
  check_id BIGINT PRIMARY KEY,
  bitstream_id INTEGER,
  process_start_date TIMESTAMP,
  process_end_date TIMESTAMP,
  checksum_expected VARCHAR,
  checksum_calculated VARCHAR,
  result VARCHAR REFERENCES checksum_results(result_code)
);

CREATE INDEX ch_result_fk_idx ON checksum_history( result );


-- Insert into checksum_results the initial set of
-- result codes that should be possible

insert into checksum_results
values
(
    'INVALID_HISTORY',
    'Install of the checksum checking code does not consider this history as valid'
);

insert into checksum_results
values
(
    'BITSTREAM_NOT_FOUND',
    'The bitstream could not be found'
);

insert into checksum_results
values
(
    'CHECKSUM_MATCH',
    'Current checksum matched previous checksum'
);

insert into checksum_results
values
(
    'CHECKSUM_NO_MATCH',
    'Current checksum does not match previous checksum'
);

insert into checksum_results
values
(
    'CHECKSUM_PREV_NOT_FOUND',
    'Previous checksum was not found: no comparison possible'
);

insert into checksum_results
values
(
    'BITSTREAM_INFO_NOT_FOUND',
    'Bitstream info not found'
);

insert into checksum_results
values
(
    'CHECKSUM_ALGORITHM_INVALID',
    'Invalid checksum algorithm'
);

insert into checksum_results
values
(
    'BITSTREAM_NOT_PROCESSED',
    'Bitstream marked to_be_processed=false'
);

insert into checksum_results
values
(
    'BITSTREAM_MARKED_DELETED',
    'Bitstream marked deleted in bitstream table'
);

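-- Illustrative examples (not part of the original schema): a checksum
-- checker run would pick up its work and report outcomes with queries
-- along these lines:
--
--   SELECT bitstream_id
--   FROM most_recent_checksum
--   WHERE to_be_processed = true;
--
--   SELECT ch.bitstream_id, ch.process_end_date, cr.result_description
--   FROM checksum_history ch
--   JOIN checksum_results cr ON ch.result = cr.result_code;
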
-------------------------------------------------------
-- Create the harvest settings table
-------------------------------------------------------
-- Values used by the OAIHarvester to harvest a collection
-- HarvestInstance is the DAO class for this table

CREATE TABLE harvested_collection
(
  collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE,
  harvest_type INTEGER,
  oai_source VARCHAR,
  oai_set_id VARCHAR,
  harvest_message VARCHAR,
  metadata_config_id VARCHAR,
  harvest_status INTEGER,
  harvest_start_time TIMESTAMP,
  last_harvested TIMESTAMP,
  id INTEGER PRIMARY KEY
);

CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id);


CREATE TABLE harvested_item
(
  item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE,
  last_harvested TIMESTAMP,
  oai_id VARCHAR,
  id INTEGER PRIMARY KEY
);

CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id);


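-- Illustrative example (not part of the original schema; treating a
-- positive harvest_type as "harvesting enabled" is an assumption here):
-- finding collections configured for OAI harvesting that have never been
-- harvested yet:
--
--   SELECT collection_id, oai_source, oai_set_id
--   FROM harvested_collection
--   WHERE harvest_type > 0
--     AND last_harvested IS NULL;
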
CREATE TABLE versionhistory
(
  versionhistory_id INTEGER NOT NULL PRIMARY KEY
);

CREATE TABLE versionitem
(
  versionitem_id INTEGER NOT NULL PRIMARY KEY,
  item_id INTEGER REFERENCES Item(item_id),
  version_number INTEGER,
  eperson_id INTEGER REFERENCES EPerson(eperson_id),
  version_date TIMESTAMP,
  version_summary VARCHAR(255),
  versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id)
);

CREATE SEQUENCE versionitem_seq;
CREATE SEQUENCE versionhistory_seq;

CREATE TABLE Webapp
(
  webapp_id INTEGER NOT NULL PRIMARY KEY,
  AppName VARCHAR(32),
  URL VARCHAR,
  Started TIMESTAMP,
  isUI INTEGER
);

@@ -87,7 +87,7 @@ CREATE TABLE BitstreamFormatRegistry
   description VARCHAR2(2000),
   support_level INTEGER,
   -- Identifies internal types
   internal NUMBER(1)
 );
 
 -------------------------------------------------------
@@ -209,7 +209,8 @@ CREATE INDEX item_submitter_fk_idx ON Item(submitter_id);
 CREATE TABLE Bundle
 (
   bundle_id INTEGER PRIMARY KEY,
-  name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT
+  -- name: ORIGINAL | THUMBNAIL | TEXT
+  name VARCHAR2(16),
   primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id)
 );
 
@@ -469,8 +470,8 @@ CREATE TABLE WorkspaceItem
   workspace_item_id INTEGER PRIMARY KEY,
   item_id INTEGER REFERENCES Item(item_id),
   collection_id INTEGER REFERENCES Collection(collection_id),
-  -- Answers to questions on first page of submit UI
-  multiple_titles NUMBER(1), -- boolean
+  -- Answers to questions on first page of submit UI (all boolean)
+  multiple_titles NUMBER(1),
   published_before NUMBER(1),
   multiple_files NUMBER(1),
   -- How for the user has got in the submit process
@@ -487,7 +488,7 @@ CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id);
 CREATE TABLE WorkflowItem
 (
   workflow_id INTEGER PRIMARY KEY,
-  item_id INTEGER REFERENCES Item(item_id) UNIQUE,
+  item_id INTEGER UNIQUE REFERENCES Item(item_id),
   collection_id INTEGER REFERENCES Collection(collection_id),
   state INTEGER,
   owner INTEGER REFERENCES EPerson(eperson_id),
@@ -575,9 +576,9 @@ CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id );
 -- Community2Item view
 ------------------------------------------------------
 CREATE VIEW Community2Item as
 SELECT Community2Collection.community_id, Collection2Item.item_id
 FROM Community2Collection, Collection2Item
 WHERE Collection2Item.collection_id = Community2Collection.collection_id
 ;
 
 -------------------------------------------------------------------------
@@ -661,61 +662,61 @@ CREATE INDEX ch_result_fk_idx ON checksum_history( result );
 -- possible
 
 insert into checksum_results
 values
 (
     'INVALID_HISTORY',
     'Install of the cheksum checking code do not consider this history as valid'
 );
 
 insert into checksum_results
 values
 (
     'BITSTREAM_NOT_FOUND',
     'The bitstream could not be found'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_MATCH',
     'Current checksum matched previous checksum'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_NO_MATCH',
     'Current checksum does not match previous checksum'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_PREV_NOT_FOUND',
     'Previous checksum was not found: no comparison possible'
 );
 
 insert into checksum_results
 values
 (
     'BITSTREAM_INFO_NOT_FOUND',
     'Bitstream info not found'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_ALGORITHM_INVALID',
     'Invalid checksum algorithm'
 );
 insert into checksum_results
 values
 (
     'BITSTREAM_NOT_PROCESSED',
     'Bitstream marked to_be_processed=false'
 );
 insert into checksum_results
 values
 (
     'BITSTREAM_MARKED_DELETED',
     'Bitstream marked deleted in bitstream table'
@@ -252,7 +252,8 @@ CREATE INDEX item_submitter_fk_idx ON Item(submitter_id);
 CREATE TABLE Bundle
 (
   bundle_id INTEGER PRIMARY KEY,
-  name VARCHAR(16), -- ORIGINAL | THUMBNAIL | TEXT
+  -- name: ORIGINAL | THUMBNAIL | TEXT
+  name VARCHAR(16),
   primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id)
 );
 
@@ -530,11 +531,10 @@ CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id);
 CREATE TABLE WorkflowItem
 (
   workflow_id INTEGER PRIMARY KEY,
-  item_id INTEGER REFERENCES Item(item_id) UNIQUE,
+  item_id INTEGER UNIQUE REFERENCES Item(item_id),
   collection_id INTEGER REFERENCES Collection(collection_id),
   state INTEGER,
   owner INTEGER REFERENCES EPerson(eperson_id),
 
   -- Answers to questions on first page of submit UI
   multiple_titles BOOL,
   published_before BOOL,
@@ -621,9 +621,9 @@ CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id );
 -- Community2Item view
 ------------------------------------------------------
 CREATE VIEW Community2Item as
 SELECT Community2Collection.community_id, Collection2Item.item_id
 FROM Community2Collection, Collection2Item
 WHERE Collection2Item.collection_id = Community2Collection.collection_id
 ;
 
 -------------------------------------------------------------------------
@@ -705,61 +705,61 @@ CREATE INDEX ch_result_fk_idx ON checksum_history( result );
 -- possible
 
 insert into checksum_results
 values
 (
     'INVALID_HISTORY',
     'Install of the cheksum checking code do not consider this history as valid'
 );
 
 insert into checksum_results
 values
 (
     'BITSTREAM_NOT_FOUND',
     'The bitstream could not be found'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_MATCH',
     'Current checksum matched previous checksum'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_NO_MATCH',
     'Current checksum does not match previous checksum'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_PREV_NOT_FOUND',
     'Previous checksum was not found: no comparison possible'
 );
 
 insert into checksum_results
 values
 (
     'BITSTREAM_INFO_NOT_FOUND',
     'Bitstream info not found'
 );
 
 insert into checksum_results
 values
 (
     'CHECKSUM_ALGORITHM_INVALID',
     'Invalid checksum algorithm'
 );
 insert into checksum_results
 values
 (
     'BITSTREAM_NOT_PROCESSED',
     'Bitstream marked to_be_processed=false'
 );
 insert into checksum_results
 values
 (
     'BITSTREAM_MARKED_DELETED',
     'Bitstream marked deleted in bitstream table'
pom.xml
@@ -261,7 +261,10 @@
     <build>
         <plugins>
             <!-- This plugin builds the testEnvironment.zip package
-                 based on the specifications in testEnvironment.xml -->
+                 based on the specifications in testEnvironment.xml.
+                 TestEnvironment.zip is an entire DSpace installation
+                 directory, which is installed by 'dspace-api' and
+                 used to run our DSpace Unit/Integration tests. -->
             <plugin>
                 <artifactId>maven-assembly-plugin</artifactId>
                 <executions>
@@ -1025,28 +1028,31 @@
             <artifactId>slf4j-log4j12</artifactId>
             <version>${slf4j.version}</version>
         </dependency>
+        <!-- JMockit and JUnit are used for Unit/Integration tests -->
         <dependency> <!-- Keep jmockit before junit -->
-            <groupId>com.googlecode.jmockit</groupId>
+            <groupId>org.jmockit</groupId>
             <artifactId>jmockit</artifactId>
-            <version>1.1</version>
+            <version>1.10</version>
             <scope>test</scope>
         </dependency>
         <dependency>
             <groupId>junit</groupId>
             <artifactId>junit</artifactId>
-            <version>4.8.1</version>
+            <version>4.11</version>
             <scope>test</scope>
         </dependency>
+        <!-- H2 is an in-memory database used for Unit/Integration tests -->
         <dependency>
             <groupId>com.h2database</groupId>
             <artifactId>h2</artifactId>
-            <version>1.2.137</version>
+            <version>1.4.180</version>
             <scope>test</scope>
         </dependency>
+        <!-- Contiperf is used for performance tests within our Unit/Integration tests -->
        <dependency>
             <groupId>org.databene</groupId>
             <artifactId>contiperf</artifactId>
-            <version>1.06</version>
+            <version>2.2.0</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -14,6 +14,9 @@
           xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2 http://maven.apache.org/xsd/assembly-1.1.2.xsd">
     <!--
       Package DSpace's common testing environment (configuration, etc.)
+      into a "testEnvironment.zip". This essentially creates a zipped up, test
+      version of a DSpace installation directory, complete with bin, configs,
+      even a dummy assetstore, etc.
     -->
     <id>testEnvironment</id>
     <formats>
@@ -22,13 +25,16 @@
     <includeBaseDirectory>false</includeBaseDirectory>
 
     <moduleSets>
+        <!-- First, copy the following from our 'dspace' assembly project into
+             a "dspace" subdirectory in the final ZIP file. -->
         <moduleSet>
+            <includes>
+                <include>org.dspace:dspace</include>
+            </includes>
             <sources>
                 <outputDirectoryMapping>dspace</outputDirectoryMapping>
                 <fileSets>
-                    <fileSet> <!-- installable DSpace files -->
-                        <directory />
-                        <outputDirectory />
+                    <fileSet>
                         <!-- Copy necessary DSpace subdirectories into Test environment -->
                         <includes>
                             <include>bin/**</include>
@@ -51,13 +57,29 @@
                         </includes>
                         <filtered>true</filtered>
                     </fileSet>
-                    <fileSet> <!-- test data -->
+                </fileSets>
+            </sources>
+        </moduleSet>
+        <!-- Next, search for a 'src/test/data/dspaceFolder' data directory in
+             ANY of our modules. If found, copy its contents into the same "dspace"
+             subdirectory in the final ZIP file, as this is data to be used in testing.
+             NOTE: This *might* overwrite/overlay default files copied from above. -->
+        <moduleSet>
+            <includes>
+                <include>org.dspace:*</include>
+            </includes>
+            <sources>
+                <outputDirectoryMapping>dspace</outputDirectoryMapping>
+                <fileSets>
+                    <fileSet>
                         <directory>src/test/data/dspaceFolder</directory>
-                        <outputDirectory />
                     </fileSet>
                 </fileSets>
             </sources>
         </moduleSet>
+        <!-- Finally, copy the 'dspace.cfg.more' from the 'dspace-api' into
+             the root directory of the ZIP. This config will be merged/weaved
+             into the default dspace.cfg (see fileweaver maven plugin) -->
         <moduleSet>
             <includes>
                 <include>org.dspace:dspace-api</include>
@@ -1,14 +1,29 @@
 # DSpace build.properties
 # This file should be customised to suit your build environment.
-# Note that not all configuration is handled here, only the most common properties that tend to differ between build environments.
+# Note that not all configuration is handled here, only the most common
+# properties that tend to differ between build environments.
 # For adjusting global settings or more complex settings, edit the relevant config file.
+#
+# IMPORTANT: Do not remove or comment out settings in build.properties
+# When you edit the "build.properties" file (or a custom *.properties file),
+# take care not to remove or comment out any settings. Doing so, may cause
+# your final "dspace.cfg" file to be misconfigured with regards to that
+# particular setting. Instead, if you wish to remove/disable a particular
+# setting, just clear out its value. For example, if you don't want to be
+# notified of new user registrations, ensure the "mail.registration.notify"
+# setting has no value, e.g. "mail.registration.notify="
+#
 
 ##########################
 # SERVER CONFIGURATION #
 ##########################
 
-# DSpace installation directory. Data will be stored within this directory
-#dspace.dir=/dspace
+# DSpace installation directory. This is the location where you want
+# to install DSpace. NOTE: this value will be copied over to the
+# "dspace.dir" setting in the final "dspace.cfg" file. It can be
+# modified later on in your "dspace.cfg", if needed.
+# NOTE: COMMENTED OUT FOR TEST ENVIRONMENT, AS WE WILL SET THIS VIA MAVEN
+#dspace.install.dir=/dspace
 
 # DSpace host name - should match base URL. Do not include port number
 dspace.hostname = localhost
@@ -30,11 +45,19 @@ default.language = en_US
 ##########################
 
 # Database name ("oracle", or "postgres")
-db.name = oracle
+db.name=oracle
+
+# Uncomment the appropriate block below for your database.
+# postgres
+#db.driver=org.postgresql.Driver
+#db.url=jdbc:postgresql://localhost:5432/dspace
+#db.username=dspace
+#db.password=dspace
+
+# oracle
+db.driver = org.h2.Driver
 # Use a 10 second database lock timeout to avoid occasional JDBC lock timeout errors
 db.url = jdbc:h2:mem:test;MODE=Oracle;LOCK_TIMEOUT=10000
-db.driver = org.h2.Driver
 db.username = sa
 db.password = sa
 
@@ -92,6 +115,7 @@ mail.alert.recipient=
 #mail.registration.notify = email-address-here
 mail.registration.notify=
 
 
 ########################
 # HANDLE CONFIGURATION #
 ########################
@@ -128,7 +152,12 @@ http.proxy.host =
 # port number of proxy server
 http.proxy.port =
 
+#####################
+# LOGLEVEL SETTINGS #
+#####################
+loglevel.other = INFO
+# loglevel.other: Log level for other third-party tools/APIs used by DSpace
+# Possible values (from most to least info): DEBUG, INFO, WARN, ERROR, FATAL
+loglevel.dspace = INFO
+# loglevel.dspace: Log level for all DSpace-specific code (org.dspace.*)
+# Possible values (from most to least info): DEBUG, INFO, WARN, ERROR, FATAL