Compare commits

..

274 Commits

Author SHA1 Message Date
Mark H. Wood
e5cb62997a [maven-release-plugin] prepare release dspace-6.2 2017-09-07 16:14:03 -04:00
Mark H. Wood
ff7c7e3d6d Regenerate third-party license list 2017-09-07 15:16:09 -04:00
Mark H. Wood
068047cb11 Update LICENSE copyright claim with current year. 2017-09-07 15:08:17 -04:00
Terry Brady
0e7c7c0886 Merge pull request #1836 from Generalelektrix/dspace-6_x
DS-3687
2017-09-06 12:14:19 -07:00
Generalelektrix
6626901564 DS-3687
Making key generic for legacy note value since it is not only used in jspui.
2017-09-06 10:28:14 -04:00
Generalelektrix
6b8f072d3e DS-3687 Hard coded note not compatible with multi-lingual sites for legacy stats
Changed hard coded string for reference to a new field in language bundle.
2017-09-06 10:18:50 -04:00
Tim Donohue
fa587c52ed Merge pull request #1830 from tuub/DS-3680
DS-3680: Database changes of consumers aren't persisted anymore
2017-08-31 06:41:19 +10:00
Pascal-Nicolas Becker
d753c09b22 DS-3680: Remove problematic uncaching. Also see DS-3681 as follow-up. 2017-08-24 18:25:41 +02:00
Pascal-Nicolas Becker
fbb45ba758 DS-3680: clarify that we need to dispatch events before committing 2017-08-24 18:25:20 +02:00
Pascal-Nicolas Becker
014456e1ed Revert "Events must be dispatched after commit() to ensure they can retrieve latest data from DB"
This reverts commit 646936a3d8.
2017-08-24 18:22:58 +02:00
Terry Brady
258b4f00e9 [DS-3602] Ensure Consistent Use of Legacy Id in Usage Queries (#1782)
* ensure that owning Item,Coll,Comm use legacy consistently

* scopeId query

* refine queries

* alter id query

* Commenting the behavior of the id / legacyId search

* Address duplicate disp for DSO w legacy and uuid stats
2017-08-17 23:48:25 +10:00
Tim Donohue
3798a12778 Merge pull request #1824 from tdonohue/DS-3656_and_DS-3648
DS-3656 and DS-3648 : Fix several Hibernate caching / saving issues
2017-08-17 06:53:28 +10:00
Hardy Pottinger
bc82adef5e [DS-3674] copied over input-forms.xml to the test config folder 2017-08-15 14:43:41 -05:00
Tim Donohue
d4d61eed68 Replace dispatchEvents() call with an actual commit() to ensure changes are saved 2017-08-10 21:27:34 +00:00
Tim Donohue
646936a3d8 Events must be dispatched after commit() to ensure they can retrieve latest data from DB 2017-08-10 21:27:00 +00:00
Tim Donohue
9dd6bb0f08 DS-3648: Don't uncache submitter and related groups. Also DS-3656: Flush changes before evict() 2017-08-10 21:25:38 +00:00
Terry Brady
0e2ed31deb Merge pull request #1821 from Georgetown-University-Libraries/ds3661r6x
[DS-3661] Port to 6x: ImageMagick PDF Processing Degraded with Color Space Changes
2017-08-09 13:18:55 -07:00
Terry W Brady
1492dfef92 Normalize space 2017-08-09 13:02:04 -07:00
Terry W Brady
8b6c1acab1 Port PR1817, Only request image info if color space 2017-08-09 13:01:17 -07:00
Alan Orth
e88924b7da DS-3517 Allow improved handling of CMYK PDFs
Allow ImageMagick to generate thumbnails with more accurate colors
for PDFs using the CMYK color system. This adds two options to the
dspace.cfg where the user can optionally specify paths to CMYK and
RGB color profiles if they are available on their system (they are
provided by Ghostscript 9.x).

Uses im4java's Info class to determine the color system being used
by the PDF.

See: http://im4java.sourceforge.net/docs/dev-guide.html
2017-08-09 19:45:28 +00:00
Terry Brady
42608e028e Merge pull request #1816 from AlexanderS/fix-discovery-reindex
DS-3660: Fix discovery reindex on metadata change
2017-08-09 12:08:31 -07:00
Alexander Sulfrian
7e68165ded DS-3660: Fix discovery reindex on metadata change
Stored objects may get evicted from the session cache and get into detached
state. Lazy loaded fields are inaccessible and throw an exception on access.

Before using objects they have to be reloaded (retrieved from the
database and associated with the session again).
2017-08-03 16:25:39 +02:00
Tim Donohue
cfecf10e81 Merge pull request #1815 from tdonohue/DS-3659
DS-3659: Database migrate fails to create the initial groups
2017-08-03 23:51:47 +10:00
Alexander Sulfrian
5d656ea922 XMLUI: Remove doubled translation key (#1818)
The key "xmlui.ChoiceLookupTransformer.lookup" is already in line 2368 of the
same file.
2017-08-03 15:23:49 +02:00
Tim Donohue
62e2ac81fb Merge pull request #1814 from AlexanderS/fix/i18n-key-typo
XMLUI/SwordClient: Fix typo in i18n key
2017-08-02 07:33:05 +10:00
Tim Donohue
e9ace604a7 DS-3659: Ensure readonly connections can never rollback 2017-08-01 18:00:28 +00:00
Alexander Sulfrian
7f91528c1a XMLUI/SwordClient: Fix typo in i18n key 2017-07-25 15:21:10 +02:00
Tim Donohue
4881e9da20 [maven-release-plugin] prepare for next development iteration 2017-07-13 12:15:12 -05:00
Tim Donohue
eb4d56201a [maven-release-plugin] prepare release dspace-6.1 2017-07-13 12:15:02 -05:00
Tim Donohue
df9fb114ba Merge pull request #1807 from tdonohue/travis-fixes
Pin versions of SASS and Compass that Travis CI uses
2017-07-14 02:58:35 +10:00
Tim Donohue
f3556278aa Pin versions of SASS and Compass that Travis uses 2017-07-13 16:28:35 +00:00
Tim Donohue
f6af76c6d8 Revert 6.1 release 2017-07-13 14:15:21 +00:00
Tim Donohue
151a5f8fe2 [maven-release-plugin] prepare for next development iteration 2017-07-12 20:55:13 +00:00
Tim Donohue
57044f6698 [maven-release-plugin] prepare release dspace-6.1 2017-07-12 20:55:07 +00:00
Tim Donohue
4954f96f1d Merge pull request #1785 from atmire/DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url
DS-3127 Whitelist allowable formats google scholar citation pdf url
2017-07-12 06:40:45 +10:00
Tim Donohue
972f76e771 Merge pull request #1790 from tomdesair/DS-3632_Correct-update-handle-prefix-script
DS-3632: Correct update-handle-prefix script
2017-07-12 06:27:08 +10:00
Tim Donohue
e30b0cdec6 DS-3431 : Fix broken tests by removing nullifying of global eperson 2017-07-11 16:13:25 +00:00
Pascal-Nicolas Becker
a0f226b763 [DS-3431] Harden DSpace's BasicWorkflowService 2017-07-11 16:10:08 +00:00
Tim Donohue
bcf3110db9 Merge pull request #1723 from atmire/DS-2359
DS-2359 Error when depositing large files via browser (over 2Gb)
2017-07-08 05:56:33 +10:00
Tom Desair
c34b277c8d DS-3628: Check READ resource policies for items returned by REST find-by-metadata-field endpoint 2017-07-07 19:47:26 +00:00
Pascal-Nicolas Becker
6263444f79 DS-3619: AuthorizeService.getAuthorizedGroups(...) should check dates 2017-07-07 19:30:00 +00:00
Tim Donohue
9caff2caab Merge pull request #1799 from tdonohue/DS-3397-6x
[DS-3397] Fix error when getting bitstream policies in REST API (6.x version)
2017-07-07 02:47:42 +10:00
Tim Donohue
6151f4f594 Merge pull request #1798 from atmire/DS-3563-DSpace-6_Missing-index-metadatavalue-resource-type-id
DS-3563: Fix Oracle Flyway migration error
2017-07-06 01:34:38 +10:00
Tim Donohue
f953848a6d [DS-3397] Add null checks to EPerson and Group 2017-07-05 15:27:43 +00:00
Tom Desair
ccc1b1b784 DS-3563: Fix Oracle Flyway migration error 2017-07-05 14:02:29 +02:00
Tom Desair
1bb6369ad6 DS-3127: Update test assert descriptions of GoogleBitstreamComparatorTest 2017-07-04 16:07:57 +02:00
Tom Desair
e31daa0230 DS-3632: Prevent the use of the locate function as this seems to give inconsistent results 2017-06-30 17:13:31 +02:00
Tom Desair
762197b452 DS-3632: Changed the update-handle-prefix script so that it does not change the handle suffix 2017-06-30 16:58:15 +02:00
kshepherd
ecd0230943 Merge pull request #1780 from atmire/DS-3595-6x
DS-3595
2017-06-30 05:41:42 +10:00
Philip Vissenaekens
c9cad9083e Merge branch 'dspace-6_x' into DS-3595-6x 2017-06-29 15:38:20 +02:00
Tom Desair
b462e0ac6d Merge branch 'dspace-6_x' into DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url 2017-06-29 09:55:42 +02:00
Terry Brady
65d638771f Merge pull request #1747 from AlexanderS/localization-input-forms-xmlui
DS-3598: Allow localization of input-forms.xml with XMLUI
2017-06-28 17:15:40 -07:00
Terry Brady
224df82087 Merge pull request #1752 from AlexanderS/fix/DS-3601-npe-feedback-page
DS-3601: Fix NPE when accessing feedback page without "Referer" header
2017-06-28 16:31:44 -07:00
Terry Brady
a6b3ce0d46 Merge pull request #1784 from rivaldi8/DS-3245-csv-linebreaks_ds6
DS-3245: CSV linebreaks not supported by Bulkedit -  DSpace 6
2017-06-28 15:47:39 -07:00
Terry Brady
2944279618 Merge pull request #1727 from tomdesair/DS-3579_Context-mode-and-cache-management-CLI-commands
DS-3579 Context mode and cache management for CLI commands
2017-06-28 14:49:11 -07:00
Tom Desair
fe115125d1 DS-3127: Prevent database updates when directly manipulating the bitstream list of a bundle 2017-06-28 17:46:58 +02:00
Tom Desair
6e9dec2c85 DS-3579: Make sure context.complete() can be called when in read-only 2017-06-28 16:15:30 +02:00
Terry Brady
fd298ae462 Merge pull request #1772 from tomdesair/DS-3571_Log-Hibernate-validation-errors
DS-3571 Log hibernate validation errors
2017-06-27 15:22:44 -07:00
Mark H. Wood
470c9b8f50 Merge pull request #1788 from mwoodiupui/DS-3568
[DS-3568] UTF-8 characters are now supported in configuration files
2017-06-26 13:34:04 -04:00
Terry Brady
33d3df72d6 Merge pull request #1732 from samuelcambien/DS-3584
DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address.
2017-06-23 16:56:27 -07:00
Christian Scheible
43cc3bd874 DS-3568. UTF-8 characters are now supported in configuration files 2017-06-22 16:35:30 -04:00
Tom Desair
3dc4909935 Fix IT tests 2017-06-22 17:07:55 +02:00
Tom Desair
71791c720f DS-3127: Process review feedback and fix tests 2017-06-22 15:01:45 +02:00
Àlex Magaz Graça
70a5124373 DS-3245: CSV linebreaks not supported by Bulkedit
When a multiline field contained empty lines, the importer stopped
reading the file. This reverts a change in 53d387fed to stop when the
end of the file has been reached instead.

Fixes https://jira.duraspace.org/browse/DS-3245
2017-06-22 13:57:06 +02:00
Philip Vissenaekens
7879ecdf14 DS-3595 2017-06-21 17:18:30 +02:00
Mark H. Wood
1db3261b54 Merge pull request #1696 from tomdesair/DS-2748_Improve-cocoon-page-not-found-page
DS-2748: Do not throw an exception in the PageNotFoundTransformer
2017-06-21 10:18:53 -04:00
Tom Desair
3732cafc4e Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:36:55 +02:00
Tom Desair
6f52d9700a Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:18:22 +02:00
Tom Desair
769d3b590f DS-3579: Fix bug in metadata-import script 2017-06-19 14:59:00 +02:00
Tom Desair
7d04016436 Merge branch 'DS-3579_Context-mode-and-cache-management-CLI-commands' of https://github.com/tomdesair/DSpace into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 14:38:28 +02:00
edusperoni
0084ae3833 DS-2291 Autocomplete not working on Mirage2 (#1741)
* fixing autocomplete problem listed on DS-2291. Also fixes the spinner that was being referenced in the wrong path.

* fix common lookup button (now consistent with the author lookup button)
2017-06-14 11:36:45 -05:00
Pascal-Nicolas Becker
fc1b22e59c Merge pull request #1767 from tomdesair/PR-1715
DS-3572: Check authorization for a specified user instead of currentUser
2017-06-13 16:08:33 +02:00
Tom Desair
9af33bc244 DS-3571: Make sure that any Hibernate schema validation error is logged instead of just a NullPointerException 2017-06-13 11:17:20 +02:00
Tom Desair
bd2d81d556 DS-3572: Renamed epersonInGroup to isEPersonInGroup 2017-06-12 15:17:59 +02:00
Tom Desair
f6eb13cf53 DS-3572: Restored behaviour of GroupService.isMember and moved new behaviour to GroupService.isParentOf 2017-06-12 15:05:59 +02:00
Tom Desair
b4a24fff7b DS-3572: Fix bug where normal group membership is ignored if special groups are present + added tests 2017-06-10 14:32:45 +02:00
Tom Desair
8bb7eb0fe5 Improve tests + make GroupService.isMember method more performant for special groups 2017-06-10 00:34:24 +02:00
Tom Desair
f48178ed41 Fix DSpace AIP IT tests: Set correct membership for admin 2017-06-09 20:09:15 +02:00
Tim Donohue
1b70e64f77 Merge pull request #1751 from tomdesair/DS-3406_Sort-Communities-and-Collections-Hibernate-Sort-Annotation
DS-3406: Sort communities and collections iteration 2
2017-06-09 09:35:00 -07:00
Tom Desair
b56bb4de3e Attempt to fix constraint violation 2017-06-09 17:51:27 +02:00
Tom Desair
139f01fffd Restore GroupServiceImpl.isMember logic + fix tests 2017-06-09 17:30:06 +02:00
frederic
257d75ca0c DS-3406 unit tests for getCollection/getCommunity for different dspace objects 2017-06-09 10:05:36 +02:00
frederic
5422a63f08 DS-3579 removed FETCH keyword and fixed typo in help message of harvest 2017-06-09 09:46:28 +02:00
Pascal-Nicolas Becker
853e6baff1 Merge pull request #1761 from tdonohue/DS-3604
DS-3604: Fix Bitstream reordering in JSPUI
2017-06-06 23:08:06 +02:00
Tim Donohue
205d8b9f92 Refactor BundleServiceImpl.setOrder() to be more failsafe. Update Tests to prove out (previously these new tests failed) 2017-06-06 14:07:16 +00:00
Pascal-Nicolas Becker
bb1e13a3b2 DS-3572: Adding simple unit test for DS-3572. 2017-06-06 15:54:13 +02:00
Pascal-Nicolas Becker
d2311663d3 DS-3572: Check authorization for a specified user instead of currentUser 2017-06-06 15:54:12 +02:00
kshepherd
7d1836bddc Merge pull request #1762 from Georgetown-University-Libraries/ds3563-6x
[DS-3563] Port PR to 6x
2017-06-06 12:36:46 +12:00
Tom Desair
36002b5829 DS-3563: Conditional create index for Oracle 2017-06-02 13:19:02 -07:00
Tom Desair
6392e195b9 DS-3563 Added missing index on metadatavalue.resource_type_id 2017-06-02 13:18:43 -07:00
Tim Donohue
d37d3a04ac Create a valid unit test for BundleServiceImpl.setOrder() method 2017-06-02 20:14:29 +00:00
Tim Donohue
ef3afe19eb DS-3604: Sync JSPUI bitstream reorder code with XMLUI code 2017-06-02 19:50:14 +00:00
Pascal-Nicolas Becker
81e171ec24 Merge pull request #1760 from tuub/DS-3582
DS-3582: Reintroduce calls to context.abort() at the end of some JSPs to free db resources.
2017-06-02 12:54:29 +02:00
Pascal-Nicolas Becker
4086e73e0b DS-3582: Any jsp that call UIUtil.obtainContext must free DB resources
Any jsp that calls UIUtil.obtainContext must either call context.abort
or context.commit to free the database connection to avoid exhausting
the database connection pool.
2017-06-01 17:37:30 +02:00
Tim Donohue
5f827ecbe8 Merge pull request #1759 from AlexanderS/rest-submissions-to-workflow
DS-3281: Start workflow for REST submissions
2017-05-31 13:52:42 -07:00
Alexander Sulfrian
30c4ca0fea DS-3281: Start workflow for REST submissions
If an item is submitted through the REST API (via POST on
/{collection_id}/items) the item should not be published immediately,
but should be approved via the defined workflow.
2017-05-31 18:27:44 +02:00
Terry Brady
094f775b6a Merge pull request #1746 from Georgetown-University-Libraries/ds3594
[DS-3594] Refine unit tests to run against postgres
2017-05-31 08:59:14 -07:00
Terry Brady
593cc085d2 Add comment for null check during sort 2017-05-23 10:23:16 -07:00
Tom Desair
f4cdfb4e65 Revert imports 2017-05-22 17:35:03 +02:00
Tom Desair
b4d8436672 DS-3406: Remove unnecessary commit 2017-05-22 17:17:03 +02:00
Tom Desair
271b6913ab Fix integration tests. Remove Hibernate Sort annotations as a collection name can change and this breaks the Set semantics 2017-05-22 15:06:44 +02:00
Alexander Sulfrian
137384c13f DS-3601: Fix NPE when accessing feedback page without "Referer" header 2017-05-22 12:24:31 +02:00
Tom Desair
72f8f9461b Fix bug so that comparator can be used for sets 2017-05-22 10:52:15 +02:00
Tom Desair
78effeac61 Fixing tests 2017-05-22 09:39:13 +02:00
Yana De Pauw
62c804f1e8 DS-3406: Ordering sub communities and collections 2017-05-22 09:39:12 +02:00
Tim Donohue
40b05ec773 Fix minor compilation error in cherry-pick of PR#1662 2017-05-18 21:03:35 +00:00
Miika Nurminen
a0e91cacd9 [DS-3463] Fix IP authentication for anonymous users
Added group membership check based on context even if no eperson is found. Affects file downloads in (at least) xmlui.
2017-05-18 20:12:34 +00:00
Alexander Sulfrian
90ca4deb35 Fix code style 2017-05-18 11:20:15 +02:00
Alexander Sulfrian
83002c3177 DS-3598: Allow localization of input-forms.xml with XMLUI
This allows separate input-forms.xml for the different locales with
XMLUI. The feature was already present in JSPUI.
2017-05-17 16:05:14 +02:00
Terry Brady
ebf256caa1 Avoid NPE 2017-05-15 14:37:59 -07:00
Terry Brady
1d655e97c9 Make destroy more forgiving of test failures 2017-05-15 14:31:41 -07:00
Terry Brady
d85a2d9153 Avoid handle collision in persistent db 2017-05-15 14:19:39 -07:00
Terry Brady
6f8a8b7f25 change parameter setting for db portability 2017-05-15 13:47:20 -07:00
Generalelektrix
3ea041d4dc DS-3164 Item statistic displays UUID of bitstreams instead of name (#1744)
simple change to return bit.getName() as opposed to return value
2017-05-10 17:16:50 -04:00
Tom Desair (Atmire)
6333fb6706 Ds 3552 read only context and hibernate improvements (#1694)
* Refactor READ ONLY mode in Context and adjust hibernate settings accordingly

* Set Context in READ-ONLY mode when retrieving community lists

* Fix Hibernate EHCache configuration + fix some Hibernate warnings

* Cache authorized actions and group membership when Context is in READ-ONLY mode

* Set default Context mode

* Let ConfigurableBrowse use a READ-ONLY context

* Add 2nd level cache support for Site and EPerson DSpaceObjects

* Added 2nd level caching for Community and Collection

* Fix tests and license checks

* Cache collection and community queries

* Small refactorings + backwards compatibility

* Set Context to READ-ONLY for JSPUI submissions and 'select collection' step

* OAI improvements part 1

* OAI indexing improvements part 1

* OAI indexing improvements part 2

* DS-3552: Only uncache resource policies in AuthorizeService when in read-only

* DS-3552: Additional comment on caching handles

* DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin

* DS-3552: Clear the read-only cache when switching Context modes

* DS-3552: Correct Group 2nd level cache size

* DS-3552: Always clear the cache, except when going from READ_ONLY to READ_ONLY
2017-05-04 14:12:06 -04:00
Hardy Pottinger
f62c32efe6 Merge pull request #1739 from edusperoni/handlebars-4
DS-3387 Upgrade handlebars to v4.
2017-05-04 12:28:15 -04:00
Hardy Pottinger
068be33265 Merge pull request #1707 from Frederic-Atmire/DS-3558
DS 3558 Case-insensitive bot matching option
2017-05-04 10:08:59 -04:00
Eduardo Speroni
3c25e04c08 upgrade grunt-contrib-handlebars to 1.0.0 2017-05-03 21:11:58 -03:00
Pascal-Nicolas Becker
a44b109f7a Merge pull request #1684 from tomdesair/DS-3406_Sort-Communities-and-Collections-with-comparator
DS-3406: Sort communities and collections in-memory using a comparator
2017-05-03 14:37:24 +02:00
frederic
a24b0078c2 Made service for SpringDetector and made SpringDetector delegate to it 2017-05-03 11:15:35 +02:00
Tom Desair
e358cb84d1 DS-3406: Resolve review feedback 2017-05-02 17:59:25 +02:00
frederic
0f51d5ad6a ported DS-3558 from dspace 5 to dspace6 2017-05-02 10:52:59 +02:00
frederic
454b0c9d6a Few tests to test case-(in)sensitive matching 2017-04-28 09:57:22 +02:00
frederic
6e1a5d1df9 made the necessary changes to easily test this class 2017-04-28 09:56:43 +02:00
frederic
b61c821e66 case-insensitive option commented out by default 2017-04-28 09:56:16 +02:00
frederic
fd76b587be wrote tests for botmatching 2017-04-27 14:24:07 +02:00
Eduardo Speroni
f12006fe21 Upgrade handlebars to v4.
Fixed advanced filters to work with handlebars v4. (https://github.com/wycats/handlebars.js/issues/1028)
2017-04-26 16:55:49 -03:00
Tim Donohue
3116c53d5e Merge pull request #1737 from cjuergen/DS-3585-6_x
Fix for DS3585
2017-04-26 11:09:14 -07:00
cjuergen
e2ffbaa3b8 Fix for DS3585 2017-04-26 15:49:28 +02:00
samuel
856e5ad388 DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address 2017-04-26 11:36:08 +02:00
Tom Desair
d2577fa16c DS-3579: Fix tests 2017-04-21 11:45:55 +02:00
Tom Desair
d5f9d9b0db DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-21 11:45:55 +02:00
Tom Desair
e4b26d64ce DS-3579: Improve cache usage harvest 2017-04-21 11:45:55 +02:00
Tom Desair
2dde39abe7 DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-21 11:45:55 +02:00
Tom Desair
a715ae4d15 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:55 +02:00
Tom Desair
e63b3f4c13 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:54 +02:00
Tom Desair
acedcacdb3 DS-3579: Improve cache usage update-handle-prefix 2017-04-21 11:45:54 +02:00
Tom Desair
37219a986d DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-21 11:45:54 +02:00
Tom Desair
a3fc30ad94 DS-3579: Fix tests 2017-04-20 21:55:28 +02:00
Terry Brady
e2862b3058 Merge pull request #1714 from tuub/DS-3575
DS-3575: Rename misguiding find method in ResourcePolicyService
2017-04-20 11:47:20 -07:00
Mark H. Wood
8442e6f395 Merge pull request #1717 from mwoodiupui/DS-3564
[DS-3564] Limit maximum idle database connections by default
2017-04-20 12:39:11 -04:00
Tom Desair
7e1a0a1a0c DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin 2017-04-20 17:40:24 +02:00
Tom Desair
a5d414c0b2 DS-3552: Additional comment on caching handles 2017-04-20 17:36:10 +02:00
Tom Desair
cabb4fab66 DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-20 17:33:07 +02:00
Tom Desair
5c19bb52e0 DS-3579: Improve cache usage harvest 2017-04-20 17:32:26 +02:00
Tom Desair
1e62dfdbbc DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-20 17:31:49 +02:00
Tom Desair
867ab6c9b9 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:37 +02:00
Tom Desair
392dd2653a DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:07 +02:00
Tom Desair
6f3546f844 DS-3579: Improve cache usage update-handle-prefix 2017-04-20 17:28:28 +02:00
Tim Donohue
9a0d293abf Merge pull request #1720 from Georgetown-University-Libraries/ds3516-6x
[DS-3516] 6x Port ImageMagick PDF Thumbnail class should only process PDFs
2017-04-20 06:56:08 -07:00
Philip Vissenaekens
782a963916 DS-2359 2017-04-20 13:10:39 +02:00
Tom Desair
0235ba391f DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-20 10:41:51 +02:00
Alan Orth
eae5a96179 port PR1709 to 6x 2017-04-19 14:44:28 -07:00
Mark H. Wood
1ef1170159 [DS-3564] Limit maximum idle database connections by default 2017-04-19 14:56:44 -04:00
Tim Donohue
4f7410232a Merge pull request #1682 from tuub/DS-3535
[DS-3535] Reduced error logging by interrupted download
2017-04-19 09:45:05 -07:00
Tim Donohue
6c29cd61b6 Merge pull request #1699 from enrique/patch-1
DS-3554: Check for empty title in Submissions
2017-04-19 09:32:06 -07:00
Tim Donohue
f6a651d4df Merge pull request #1703 from samuelcambien/DS-3553
DS-3553: when creating a new version, do context complete before redirecting to the submission page
2017-04-19 09:27:14 -07:00
Tim Donohue
c57b443611 Merge pull request #1713 from atmire/DS-3573-Filtername-in-XMLUI-Discovery-filter-labels-dspace6
DS-3573: Filtername in XMLUI Discovery filter labels
2017-04-19 09:19:54 -07:00
Pascal-Nicolas Becker
a5bdff0803 DS-3575: Rename misguiding find method in ResourcePolicyService 2017-04-18 18:12:32 +02:00
samuel
e3f72b280d DS-3553: when creating a new version, do context complete before redirecting to the submission page 2017-04-18 11:01:47 +02:00
Yana De Pauw
63ed4cc1e0 DS-3573: Filtername in XMLUI Discovery filter labels 2017-04-14 15:26:08 +02:00
Tom Desair
f0a5e7d380 DS-3552: Only uncache resource policies in AuthorizeService when in read-only 2017-04-14 09:26:08 +02:00
Tom Desair
1e64850af2 OAI indexing improvements part 2 2017-04-14 00:40:19 +02:00
Tom Desair
d9db5a66ca OAI indexing improvements part 1 2017-04-14 00:21:03 +02:00
Tom Desair
5f77bd441a OAI improvements part 1 2017-04-13 17:44:21 +02:00
frederic
4b87935cbb DS-3558 removed duplicate code and changed default option 2017-04-13 16:27:19 +02:00
Tim Donohue
3db74c7ba3 Merge pull request #1671 from mwoodiupui/DS-3505
[DS-3505] Bad redirection from logout action
2017-04-12 13:37:17 -07:00
frederic
f000b280c1 DS-3558 added comments on code 2017-04-12 15:04:57 +02:00
frederic
cad79dc6c9 DS-3558 made case insensitive botsearch configurable and optimized case insensitive pattern matching 2017-04-12 14:29:58 +02:00
Enrique Martínez Zúñiga
794600b96e Fix for DS-3554
Use StringUtils.isNotBlank instead of only checking title.length
2017-04-05 09:31:20 -05:00
Tom Desair
044ba1acd3 DS-2748: Do not throw an exception in the PageNotFoundTransformer but do return a 404 error code 2017-04-05 15:45:32 +02:00
Tom Desair
f54fe5c12e Set Context to READ-ONLY for JSPUI submissions and 'select collection' step 2017-04-05 15:23:16 +02:00
Tom Desair
1e917ed845 Small refactorings + backwards compatibility 2017-04-05 11:02:58 +02:00
Tom Desair
7719848d47 Cache collection and community queries 2017-04-05 09:59:31 +02:00
Tom Desair
f0e9e04a3a Fix tests and license checks 2017-04-04 13:44:38 +02:00
Tom Desair
5f194334ff Added 2nd level caching for Community and Collection 2017-04-04 13:16:13 +02:00
Tom Desair
7371a7c71d Add 2nd level cache support for Site and EPerson DSpaceObjects 2017-04-03 16:21:14 +02:00
Tom Desair
3963c95f6e Let ConfigurableBrowse use a READ-ONLY context 2017-04-03 15:59:13 +02:00
Tom Desair
75497f5107 Set default Context mode 2017-04-03 15:54:18 +02:00
Tom Desair
852c4d3b62 Cache authorized actions and group membership when Context is in READ-ONLY mode 2017-04-03 15:26:29 +02:00
Tom Desair
d108464a3a Fix Hibernate EHCache configuration + fix some Hibernate warnings 2017-04-03 15:26:29 +02:00
Tom Desair
dbfc8ce9a7 Set Context in READ-ONLY mode when retrieving community lists 2017-04-03 15:26:28 +02:00
Tom Desair
eee4923518 Refactor READ ONLY mode in Context and adjust hibernate settings accordingly 2017-04-03 15:26:28 +02:00
Toni Prieto
9ef505498b [DS-2947] DIM crosswalks repeats authority & confidence values in the metadata values 2017-03-24 16:16:31 +00:00
Tom Desair
3540fe5ec6 DS-3406: Sort communities and collections in-memory using a comparator 2017-03-23 15:27:02 +01:00
Tim Donohue
57f2a10da1 Merge pull request #1663 from mwoodiupui/DS-1140
[DS-1140] Update MSWord Media Filter to use Apache POI (like PPT Filter) and also support .docx
2017-03-22 10:31:35 -05:00
Per Broman
1e33e27a84 [DS-3535] Reduced error logging by interrupted download 2017-03-21 10:29:06 +01:00
Pascal-Nicolas Becker
a54bf11b8c Merge pull request #1673 from tuub/DS-3523
[DS-3523] Bugfix for search with embargoed thumbnails
2017-03-09 12:38:58 +01:00
Per Broman
0601e9f061 [DS-3523] Bugfix for search with embargoed thumbnails 2017-03-09 12:07:52 +01:00
Mark H. Wood
b578abd054 [DS-3505] On logout redirect to dspace.url, not context path. 2017-03-08 15:51:01 -05:00
Terry Brady
bc8629b145 [DS-3348] Drop date check in EmbargoService (#1542)
* Drop date check in EmbargoService

* Revise comment per review
2017-03-08 18:29:12 +00:00
Peter Dietz
26859b1133 DS-3366 Fix handleresolver by removing out.close (#1560) 2017-03-08 18:25:38 +00:00
Andrea Schweer
97785d778f [DS-3336] Properly sort collections in move item drop-down 2017-03-08 18:08:30 +00:00
Terry Brady
f1c3a9d919 fix typo in comment 2017-03-08 17:44:30 +00:00
Terry Brady
6442c979aa First attempt to resort submitters 2017-03-08 17:44:12 +00:00
Tim Donohue
a36f5b1f48 Merge pull request #1670 from tuub/DS-3521
[DS-3521] Bugfix browsing embargoed thumbnail
2017-03-08 09:51:56 -06:00
Per Broman
36a87c2107 [DS-3521] Bugfix browsing embargoed thumbnail 2017-03-07 12:09:28 +01:00
Mark H. Wood
43d7cd564c [DS-1140] Add configuration data 2017-03-02 15:49:34 -05:00
Mark H. Wood
9d8738c934 [DS-1140] Add unit test. 2017-03-02 14:50:14 -05:00
Mark H. Wood
c09edc5a15 [DS-1140] No need to treat old and new Word formats differently 2017-03-02 14:49:24 -05:00
Tim Donohue
2d95c7a2a1 Merge pull request #1652 from Georgetown-University-Libraries/ds3282-6x
[DS-3282] 6x Fix js error for filters with dashes
2017-03-01 14:59:47 -06:00
Terry Brady
d2c43b8aa5 Merge pull request #1654 from Georgetown-University-Libraries/ds2789-6_x
[DS-2789] 6x Display a "restricted image" for a thumbnail if the bitstream is restricted
2017-03-01 12:53:44 -08:00
Terry Brady
5d9dd4d4e3 Merge pull request #1660 from Georgetown-University-Libraries/ds3283-6x2
[DS-3283] 6x Mirage2: Edit Collection Source - No Field Label for Set Id
2017-03-01 12:42:38 -08:00
Mark H. Wood
24c1f5367c [DS-1140] New POI-based MS Word extractor and some comment cleanup 2017-02-28 17:12:23 -05:00
Hardy Pottinger
fbaf950388 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-28 16:10:08 -06:00
Hardy Pottinger
ddedfa2a14 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-28 16:07:58 -06:00
Terry W Brady
2b96f9472b Add default lock icon for Mirage theme 2017-02-27 14:10:02 -08:00
Terry W Brady
1af23f2d8b reapply pr from master 2017-02-27 14:10:02 -08:00
Terry W Brady
a868a4bc9b Re-applying changes 2017-02-27 13:45:53 -08:00
Tim Donohue
2734dca1cd Merge pull request #1659 from tdonohue/fix_travis_timeouts
Fix Travis CI Maven download timeouts
2017-02-27 15:36:07 -06:00
Tim Donohue
8c70f9bc8c Workaround for travis-ci/travis-ci#4629 2017-02-27 21:21:08 +00:00
Tom Desair
8d56e828a2 DS-3367: Fix authorization error when non-admin users claim a configurable workflow task 2017-02-23 16:28:37 -05:00
Mark H. Wood
0e8c95a196 Merge pull request #1651 from mwoodiupui/DS-3378
[DS-3378] Patch to restore lost indices, from Adan Roman
2017-02-23 16:06:08 -05:00
Terry Brady
cf190c78e8 Fix js error for filters with dashes 2017-02-23 09:40:10 -08:00
Mark H. Wood
2d1c59ac49 [DS-3378] Patch to restore lost indices, from Adan Roman 2017-02-22 17:24:46 -05:00
Tom Desair
3a03e7a9d3 DS-2952: Added missing license 2017-02-22 20:26:42 +00:00
Tom Desair
757264c1f6 DS-2952: Only prepend new line if we have an actual input stream 2017-02-22 20:26:33 +00:00
Tom Desair
dfe6d79da4 DS-2952: Small improvements to FullTextContentStreams and added a unit test for it 2017-02-22 20:26:23 +00:00
Tom Desair
708fe215b0 DS-2952: Use a SequenceInputStream to add the content of multiple full text bitstreams to SOLR 2017-02-22 20:26:09 +00:00
Hardy Pottinger
a51ad3c6eb Merge pull request #1614 from jonas-atmire/DS-3448-MultiSelect-in-Submission
DS-3448 Multi-select in submission for workflow and workspace items
2017-02-22 12:13:12 -06:00
Hardy Pottinger
c5aebee9cc Merge pull request #1649 from hardyoyo/DS-3501-fix-XML-validation-by-excluding-failing-node-packages
[DS-3501] adjust XML validation
2017-02-22 11:17:38 -06:00
Hardy Pottinger
8a06522fa9 [DS-3501] adjust XML validation to skip contents of any folder that includes the text node/node_modules 2017-02-22 16:41:35 +00:00
samuel
267518ebaf DS 3425 outputstream gets closed in JSONDiscoverySearcher 2017-02-21 21:34:29 +00:00
samuel
2685cd793e DS-3415 - administrative.js doEditCommunity wrong parameter name 2017-02-21 21:03:19 +00:00
Tim Donohue
36c7fa9c1a Merge pull request #1588 from atmire/DS-3419-6_x
DS-3419
2017-02-21 14:55:56 -06:00
Bram Luyten
54c5c2932b DS-2840 sidebar facet logging from INFO to DEBUG
Changes INFO level sidebar facet transformer log entries to DEBUG
2017-02-18 14:20:08 +01:00
Luigi Andrea Pascarelli
7225f2597a DS-3356 add turnoff authz system 2017-02-15 22:10:18 +00:00
Mark H. Wood
59632413c2 [DS-3469] virus scan during submission attempts to read uploaded bitstream as anonymous user, which fails (#1632)
* [DS-3469] Add the current session context to the curation task run.

* [DS-3469] Log how I/O failed, not just that it did.

* [DS-3469] Keep reference to Bundle from which we just removed the Bitstream instead of expecting the List of Bundle to be unaltered.

* [DS-3469] Finish switching from e.getMessage() to e

* [DS-3469] Note the side effect of calling curate() with a Context.
2017-02-08 10:32:29 -06:00
Tim Donohue
7650af1e69 Merge pull request #1639 from rradillen/DS-3473
DS-3473: add guard code in case no dot is present in bitstream name
2017-02-08 10:24:28 -06:00
Tim Donohue
e4659832a0 Merge pull request #1641 from cjuergen/DS-3479-6_x
Fix for DS-3479 preventing the import of empty metadata
2017-02-08 10:15:26 -06:00
Tim Donohue
ab982e4f0b Merge pull request #1613 from tomdesair/DS-3436-Sharding-corrupts-multivalued-fields
DS-3436 Sharding SOLR cores corrupts multivalued fields
2017-02-08 09:47:22 -06:00
Terry Brady
8d76aa2010 [DS-3456] 6x Fix Command Line Parameters for statistics import/export tools (#1624)
* Clarify command line args

* support flexible import/export of stats

* Fix DS-3464 solr-reindex-statistics for shard

* Preserve multi val fields on import/export

* Time zone consistency in shard name creation

* Migrate PR feedback from 5x to 6x

* whitespace
2017-02-08 09:43:03 -06:00
Tim Donohue
9eb7c6734c Merge pull request #1633 from Georgetown-University-Libraries/ds3457b
[DS-3457] Address tomcat hang when multiple solr shards exist in DSpace 6
2017-02-08 09:30:42 -06:00
cjuergen
99c1af8688 Fix for DS-3479 preventing the import of empty metadata 2017-02-06 15:11:14 +01:00
Roeland Dillen
866bfe8fd8 add guard code in case no dot is present in bitstream name 2017-02-05 13:45:40 +01:00
Terry Brady
12de02c7f3 Merge pull request #1637 from kshepherd/DS-3477
[DS-3477] fix altmetrics config lookups in item-view.xsl (6.x)
2017-02-02 09:56:12 -08:00
Kim Shepherd
0c0b280d05 [DS-3477] fix altmetrics config lookups in item-view.xsl 2017-02-02 18:04:36 +13:00
Hardy Pottinger
bf1979fd41 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-01 15:49:19 -06:00
Hardy Pottinger
e32b93bae3 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-01 15:48:51 -06:00
kshepherd
f86fff9063 Merge pull request #1611 from tomdesair/DS-3446-DSpace-6x_Non-admin-submitter-cannot-remove-bitstream
DS-3446: On bitstream delete, remove policies only after the bitstream has been updated
2017-02-02 09:42:33 +13:00
Terry W Brady
f7cadf8774 Initialize solr shards at first stats post
Make it more likely that the shards are awake on first use
2017-01-31 15:02:55 -08:00
Terry W Brady
4f7520d532 Additional comments 2017-01-30 17:05:04 -08:00
Terry W Brady
9904fdb412 DS-3457 and DS-3458 fixes 2017-01-30 12:11:06 -08:00
Terry Brady
e0e223e2bf [DS-3468] 6x Ignore bin directory built by Eclipse (#1627)
* Exclude top level /bin directory built by Eclipse
2017-01-26 16:28:25 +01:00
Hardy Pottinger
45762e993d Merge pull request #1617 from jonas-atmire/DS-3445-ChecksumChecker-no-enum-constant-error
DS-3445 Only add "ResultCode" if not default
2017-01-19 10:15:11 -06:00
Andrew Bennet
ce72010805 [DS-3460] Fix incorrect REST documentation 2017-01-17 21:32:40 +01:00
Bram Luyten
faa12bfd33 Merge pull request #1610 from tomdesair/DS-3108-DSpace-6x_Support-non-email-based-authentication-in-REST-API
DS-3108 DSpace 6x: Support non-email based authentication in REST API
2017-01-14 11:44:35 +01:00
Jonas Van Goolen
2805386f9d DS-3445 Only add "ResultCode" if not default 2017-01-13 10:41:30 +01:00
Jonas Van Goolen
a62eddeb59 DS-3448 Removal of unnecessary duplicate javascript file 2017-01-13 09:43:43 +01:00
Jonas Van Goolen
c873e554d3 DS-3448 Multi-select in submission for workflow and workspace items -> License headers in new files 2017-01-12 13:52:21 +01:00
Jonas Van Goolen
01dee698c2 DS-3448 Multi-select in submission for workflow and workspace items 2017-01-11 15:33:25 +01:00
Tom Desair
eb5dc58384 DS-3436: Tell SOLR to split values of multi-valued fields when sharding cores 2017-01-11 12:55:10 +01:00
Tim Donohue
958631c81c Merge pull request #1600 from samuelcambien/dspace-6_x-DS-3435
DS-3435 possible nullpointerexception at AccessStepUtil$populateEmbar…
2017-01-10 09:04:35 -06:00
Tom Desair
89ded55942 DS-3108 DSpace 6 only: Revert rename REST API login parameter email to user 2017-01-10 14:04:01 +01:00
Tom Desair
9855022228 Revert "DS-3108: Rename REST API login parameter email to user"
This reverts commit d2c4233d9e.
2017-01-10 13:57:29 +01:00
Tom Desair
bfc68d3354 DS-3446: Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights) 2017-01-09 22:53:52 +01:00
Tom Desair
38848e16d3 DS-3108: Update REST API authentication documentation
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:33:58 +01:00
Tom Desair
0244a425ae DS-3108: Remove deprecation since there is no alternative 2017-01-09 17:32:55 +01:00
Tom Desair
c3c5287880 DS-3108: Remove unused imports 2017-01-09 17:32:49 +01:00
Tom Desair
3321cba560 DS-3108: Remove unnecessary /login-shibboleth endpoint
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:32:45 +01:00
Tom Desair
684e87ed20 DS-3108: Return FORBIDDEN error code when authentication on the REST API failed
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:31:24 +01:00
Tom Desair
d2c4233d9e DS-3108: Rename REST API login parameter email to user 2017-01-09 17:30:38 +01:00
Tom Desair
ae9862395a DS-3108: Support authentication mechanisms where the e-mail attribute is not an e-mail address 2017-01-09 17:30:26 +01:00
Tim Donohue
6256c673b9 Merge pull request #1607 from bram-atmire/DS-3289
DS-3289 Removing double slashes in image paths
2017-01-09 09:17:23 -06:00
Bram Luyten
2b0448fe64 DS-3289 Removing double slashes in image paths 2017-01-07 18:22:03 +01:00
cjuergen
1e4ae0b5e3 Cherry pick DS-3440 solution d95902b 2017-01-06 19:09:44 +01:00
Bram Luyten
1f36899abe Merge pull request #1605 from 4Science/DS-3441-6x
DS-3441 READ permission on the Collection object not respected by the JSPUI (6_x)
2017-01-06 18:18:50 +01:00
Andrea Bollini
a6aa9816d2 DS-3441 READ permission on the Collection object not respected by the JSPUI 2017-01-06 13:56:47 +01:00
Bram Luyten
242d1357c7 Merge pull request #1601 from tomdesair/DS-3381_Workspace-item-not-saved-when-using-versioning
DS-3381 workspace item not saved when using versioning
2017-01-05 16:43:50 +01:00
Tom Desair
4b927562b6 DS-3381: Do an explicit commit so that the workspace item is written to the database before the redirect to the submission form (see versioning.js doCreateNewVersion) 2017-01-04 23:05:20 +01:00
samuel
7b6ea8e807 DS-3435 possible nullpointerexception at AccessStepUtil$populateEmbargoDetail
Conflicts:
	dspace-xmlui/src/main/java/org/dspace/app/xmlui/aspect/submission/submit/AccessStepUtil.java
2017-01-03 12:40:56 +01:00
Philip Vissenaekens
a3c6aa2ced DS-3419 2016-12-09 13:14:55 +01:00
Ivan Masár
50eed239f5 DS-3363 CSV import error says "row", means "column" 2016-11-14 18:28:11 +01:00
Ivan Masár
3065389435 typo: xforwarderfor -> xforwardedfor 2016-11-01 16:18:45 +01:00
3460 changed files with 353921 additions and 119704 deletions

5
.gitignore vendored
View File

@@ -9,7 +9,6 @@ tags
/bin/
.project
.classpath
.checkstyle
## Ignore project files created by IntelliJ IDEA
*.iml
@@ -26,6 +25,7 @@ dist/
nbdist/
nbactions.xml
nb-configuration.xml
META-INF/
## Ignore all *.properties file in root folder, EXCEPT build.properties (the default)
## KEPT FOR BACKWARDS COMPATIBILITY WITH 5.x (build.properties is now replaced with local.cfg)
@@ -39,6 +39,3 @@ nb-configuration.xml
##Mac noise
.DS_Store
##Ignore JRebel project configuration
rebel.xml

View File

@@ -5,23 +5,29 @@ env:
# Give Maven 1GB of memory to work with
- MAVEN_OPTS=-Xmx1024M
jdk:
# DS-3384 Oracle JDK 8 has DocLint enabled by default.
# Let's use this to catch any newly introduced DocLint issues.
- oraclejdk8
## Should we run into any problems with oraclejdk8 on Travis, we may try the following workaround.
## https://docs.travis-ci.com/user/languages/java#Testing-Against-Multiple-JDKs
## https://github.com/travis-ci/travis-ci/issues/3259#issuecomment-130860338
#addons:
# apt:
# packages:
# - oracle-java8-installer
# Install prerequisites for building Mirage2 more rapidly
before_install:
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
- rm ~/.m2/settings.xml
# Install Node.js 6.5.0 & print version info
- nvm install 6.5.0
- node --version
# Install npm 3.10.8 & print version info
- npm install -g npm@3.10.8
- npm --version
# Install Bower
- npm install -g bower
# Install Grunt & print version info
- npm install -g grunt && npm install -g grunt-cli
- grunt --version
# Print ruby version info (should be installed)
- ruby -v
# Install Sass & print version info
- gem install sass -v 3.3.14
- sass -v
# Install Compass & print version info
- gem install compass -v 1.0.1
- compass version
# Skip install stage, as we'll do it below
install: "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"
@@ -39,6 +45,8 @@ script:
# -V => Display Maven version info before build
# -Dsurefire.rerunFailingTestsCount=2 => try again for flakey tests, and keep track of/report on number of retries
- "mvn clean install license:check -Dmaven.test.skip=false -DskipITs=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
# 2. [Assemble DSpace] Ensure overlay & assembly process works (from [src]/dspace/)
# 2. [Assemble DSpace] Ensure assembly process works (from [src]/dspace/), including Mirage 2
# -Dmirage2.on=true => Build Mirage2
# -Dmirage2.deps.included=false => Don't include Mirage2 build dependencies (We installed them in before_install)
# -P !assembly => SKIP the actual building of [src]/dspace/dspace-installer (as it can be memory intensive)
- "cd dspace && mvn package -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"
- "cd dspace && mvn package -Dmirage2.on=true -Dmirage2.deps.included=false -P !assembly -B -V -Dsurefire.rerunFailingTestsCount=2"

View File

@@ -1,7 +1,7 @@
DSpace source code license:
Copyright (c) 2002-2016, DuraSpace. All rights reserved.
Copyright (c) 2002-2017, DuraSpace. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are

View File

@@ -199,6 +199,8 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Woodstox (org.codehaus.woodstox:woodstox-core-asl:4.1.4 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.0 - http://woodstox.codehaus.org)
* Woodstox (org.codehaus.woodstox:wstx-asl:3.2.7 - http://woodstox.codehaus.org)
* databene ContiPerf (org.databene:contiperf:2.3.4 - http://databene.org/contiperf)
* elasticsearch (org.elasticsearch:elasticsearch:1.4.0 - http://nexus.sonatype.org/oss-repository-hosting.html/elasticsearch)
* flyway-core (org.flywaydb:flyway-core:4.0.3 - https://flywaydb.org/flyway-core)
* Ogg and Vorbis for Java, Core (org.gagravarr:vorbis-java-core:0.1 - https://github.com/Gagravarr/VorbisJava)
* Apache Tika plugin for Ogg, Vorbis and FLAC (org.gagravarr:vorbis-java-tika:0.1 - https://github.com/Gagravarr/VorbisJava)
@@ -278,27 +280,27 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.3 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.3 - https://github.com/dspace/dspace-xmlui-lang)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.5 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.2-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.6 - https://github.com/dspace/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.2-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)
* ASM Core (org.ow2.asm:asm:4.1 - http://asm.objectweb.org/asm/)
* ASM Analysis (org.ow2.asm:asm-analysis:4.1 - http://asm.objectweb.org/asm-analysis/)

View File

@@ -1,9 +1,6 @@
# DSpace
## NOTE: The rest-tutorial branch has been created to support the [DSpace 7 REST documentation](https://dspace-labs.github.io/DSpace7RestTutorial/walkthrough/intro)
- This branch provides stable, referencable line numbers in code
[![Build Status](https://travis-ci.org/DSpace/DSpace.png?branch=master)](https://travis-ci.org/DSpace/DSpace)
[DSpace Documentation](https://wiki.duraspace.org/display/DSDOC/) |
@@ -12,20 +9,9 @@
[Support](https://wiki.duraspace.org/display/DSPACE/Support)
DSpace open source software is a turnkey repository application used by more than
2,000 organizations and institutions worldwide to provide durable access to digital resources.
1000+ organizations and institutions worldwide to provide durable access to digital resources.
For more information, visit http://www.dspace.org/
***
:warning: **Work on DSpace 7 has begun on our `master` branch.** This means that there is temporarily NO user interface on this `master` branch. DSpace 7 will feature a new, unified [Angular](https://angular.io/) user interface, along with an enhanced, rebuilt REST API. The latest status of this work can be found on the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) page. Additionally, the codebases can be found in the following places:
* DSpace 7 REST API work is occurring on the [`master` branch](https://github.com/DSpace/DSpace/tree/master/dspace-spring-rest) of this repository.
* The REST Contract is being documented at https://github.com/DSpace/Rest7Contract
* DSpace 7 Angular UI work is occurring at https://github.com/DSpace/dspace-angular
**If you would like to get involved in our DSpace 7 development effort, we welcome new contributors.** Just join one of our meetings or get in touch via Slack. See the [DSpace 7 UI Working Group](https://wiki.duraspace.org/display/DSPACE/DSpace+7+UI+Working+Group) wiki page for more info.
**If you are looking for the ongoing maintenance work for DSpace 6 (or prior releases)**, you can find that work on the corresponding maintenance branch (e.g. [`dspace-6_x`](https://github.com/DSpace/DSpace/tree/dspace-6_x)) in this repository.
***
## Downloads
The latest release of DSpace can be downloaded from the [DSpace website](http://www.dspace.org/latest-release/) or from [GitHub](https://github.com/DSpace/DSpace/releases).
@@ -67,8 +53,6 @@ We welcome everyone to participate in these lists:
* [dspace-tech@googlegroups.com](https://groups.google.com/d/forum/dspace-tech) : Technical support mailing list. See also our guide for [How to troubleshoot an error](https://wiki.duraspace.org/display/DSPACE/Troubleshoot+an+error).
* [dspace-devel@googlegroups.com](https://groups.google.com/d/forum/dspace-devel) : Developers / Development mailing list
Great Q&A is also available under the [DSpace tag on Stackoverflow](http://stackoverflow.com/questions/tagged/dspace)
Additional support options are listed at https://wiki.duraspace.org/display/DSPACE/Support
DSpace also has an active service provider network. If you'd rather hire a service provider to

View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE suppressions PUBLIC
"-//Puppy Crawl//DTD Suppressions 1.2//EN"
"http://checkstyle.sourceforge.net/dtds/suppressions_1_2.dtd">
<suppressions>
<!-- Temporarily suppress indentation checks for all Tests -->
<!-- TODO: We should have these turned on. But, currently there's a known bug with indentation checks
on JMockIt Expectations blocks and similar. See https://github.com/checkstyle/checkstyle/issues/3739 -->
<suppress checks="Indentation" files="src[/\\]test[/\\]java"/>
</suppressions>

View File

@@ -1,144 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE module PUBLIC
"-//Puppy Crawl//DTD Check Configuration 1.3//EN"
"http://checkstyle.sourceforge.net/dtds/configuration_1_3.dtd">
<!--
DSpace CodeStyle Requirements
1. 4-space indents for Java, and 2-space indents for XML. NO TABS ALLOWED.
2. K&R style braces required. Braces required on all blocks.
3. Do not use wildcard imports (e.g. import java.util.*). Duplicated or unused imports also not allowed.
4. Javadocs should exist for all public classes and methods. (Methods rule is unenforced at this time.) Keep it short and to the point
5. Maximum line length is 120 characters (except for long URLs, packages or imports)
6. No trailing spaces allowed (except in comments)
7. Tokens should be surrounded by whitespace (see http://checkstyle.sourceforge.net/config_whitespace.html#WhitespaceAround)
8. Each source file must include our license header (validated separately by license-maven-plugin, see pom.xml)
For more information on CheckStyle configurations below, see: http://checkstyle.sourceforge.net/checks.html
-->
<module name="Checker">
<!-- Configure checker to use UTF-8 encoding -->
<property name="charset" value="UTF-8"/>
<!-- Configure checker to run on files with these extensions -->
<property name="fileExtensions" value="java, properties, cfg, xml"/>
<!-- Suppression configurations in checkstyle-suppressions.xml in same directory -->
<module name="SuppressionFilter">
<property name="file" value="${checkstyle.suppressions.file}" default="checkstyle-suppressions.xml"/>
</module>
<!-- No tab characters ('\t') allowed in the source code -->
<module name="FileTabCharacter">
<property name="eachLine" value="true"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- No Trailing Whitespace, except on lines that only have an asterisk (e.g. Javadoc comments) -->
<module name="RegexpSingleline">
<property name="format" value="(?&lt;!\*)\s+$|\*\s\s+$"/>
<property name="message" value="Line has trailing whitespace"/>
<property name="fileExtensions" value="java, properties, cfg, css, js, xml"/>
</module>
<!-- Allow individual lines of code to be excluded from these rules, if they are annotated
with @SuppressWarnings. See also SuppressWarningsHolder below -->
<module name="SuppressWarningsFilter" />
<!-- Check individual Java source files for specific rules -->
<module name="TreeWalker">
<!-- Maximum line length is 120 characters -->
<module name="LineLength">
<property name="max" value="120"/>
<!-- Only exceptions for packages, imports, URLs, and JavaDoc {@link} tags -->
<property name="ignorePattern" value="^package.*|^import.*|http://|https://|@link"/>
</module>
<!-- Highlight any TODO or FIXME comments in info messages -->
<module name="TodoComment">
<property name="severity" value="info"/>
<property name="format" value="(TODO)|(FIXME)"/>
</module>
<!-- Do not report errors on any lines annotated with @SuppressWarnings -->
<module name="SuppressWarningsHolder"/>
<!-- ##### Import statement requirements ##### -->
<!-- Star imports (e.g. import java.util.*) are NOT ALLOWED -->
<module name="AvoidStarImport"/>
<!-- Redundant import statements are NOT ALLOWED -->
<module name="RedundantImport"/>
<!-- Unused import statements are NOT ALLOWED -->
<module name="UnusedImports"/>
<!-- Ensure imports appear alphabetically and grouped -->
<module name="CustomImportOrder">
<property name="sortImportsInGroupAlphabetically" value="true"/>
<property name="separateLineBetweenGroups" value="true"/>
<property name="customImportOrderRules" value="STATIC###STANDARD_JAVA_PACKAGE###THIRD_PARTY_PACKAGE"/>
</module>
<!-- ##### Javadocs requirements ##### -->
<!-- Requirements for Javadocs for classes/interfaces -->
<module name="JavadocType">
<!-- All public classes/interfaces MUST HAVE Javadocs -->
<property name="scope" value="public"/>
<!-- Add an exception for anonymous inner classes -->
<property name="excludeScope" value="anoninner"/>
<!-- Ignore errors related to unknown tags -->
<property name="allowUnknownTags" value="true"/>
<!-- Allow params tags to be optional -->
<property name="allowMissingParamTags" value="false"/>
</module>
<!-- Requirements for Javadocs for methods -->
<module name="JavadocMethod">
<!-- All public methods MUST HAVE Javadocs -->
<!-- <property name="scope" value="public"/> -->
<!-- TODO: Above rule has been disabled because of large amount of missing public method Javadocs -->
<property name="scope" value="nothing"/>
<!-- Allow RuntimeExceptions to be undeclared -->
<property name="allowUndeclaredRTE" value="true"/>
<!-- Allow params, throws and return tags to be optional -->
<property name="allowMissingParamTags" value="true"/>
<property name="allowMissingThrowsTags" value="true"/>
<property name="allowMissingReturnTag" value="true"/>
</module>
<!-- ##### Requirements for K&R Style braces ##### -->
<!-- Code blocks MUST HAVE braces, even single line statements (if, while, etc) -->
<module name="NeedBraces"/>
<!-- Left braces should be at the end of current line (default value)-->
<module name="LeftCurly"/>
<!-- Right braces should be on start of a new line (default value) -->
<module name="RightCurly"/>
<!-- ##### Indentation / Whitespace requirements ##### -->
<!-- Require 4-space indentation (default value) -->
<module name="Indentation"/>
<!-- Whitespace should exist around all major tokens -->
<module name="WhitespaceAround">
<!-- However, make an exception for empty constructors, methods, types, etc. -->
<property name="allowEmptyConstructors" value="true"/>
<property name="allowEmptyMethods" value="true"/>
<property name="allowEmptyTypes" value="true"/>
<property name="allowEmptyLoops" value="true"/>
</module>
<!-- Validate whitespace around Generics (angle brackets) per typical conventions
http://checkstyle.sourceforge.net/config_whitespace.html#GenericWhitespace -->
<module name="GenericWhitespace"/>
<!-- ##### Requirements for "switch" statements ##### -->
<!-- "switch" statements MUST have a "default" clause -->
<module name="MissingSwitchDefault"/>
<!-- "case" clauses in switch statements MUST include break, return, throw or continue -->
<module name="FallThrough"/>
<!-- ##### Other / Miscellaneous requirements ##### -->
<!-- Require utility classes do not have a public constructor -->
<module name="HideUtilityClassConstructor"/>
<!-- Require each variable declaration is its own statement on its own line -->
<module name="MultipleVariableDeclarations"/>
<!-- Each line of code can only include one statement -->
<module name="OneStatementPerLine"/>
<!-- Require that "catch" statements are not empty (must at least contain a comment) -->
<module name="EmptyCatchBlock"/>
</module>
</module>

View File

@@ -1,5 +1,4 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.dspace</groupId>
<artifactId>dspace-api</artifactId>
@@ -13,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>7.0-SNAPSHOT</version>
<version>6.2</version>
<relativePath>..</relativePath>
</parent>
@@ -206,15 +205,14 @@
<executions>
<execution>
<id>setproperty</id>
<phase>generate-test-resources
</phase> <!-- XXX I think this should be 'initialize' - MHW -->
<phase>generate-test-resources</phase> <!-- XXX I think this should be 'initialize' - MHW -->
<goals>
<goal>execute</goal>
</goals>
<configuration>
<source>
project.properties['agnostic.build.dir'] = project.build.directory.replace(File.separator, '/');
println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']);
project.properties['agnostic.build.dir']=project.build.directory.replace(File.separator,'/');
println("Initializing Maven property 'agnostic.build.dir' to: " + project.properties['agnostic.build.dir']);
</source>
</configuration>
</execution>
@@ -241,39 +239,43 @@
<artifactId>xml-maven-plugin</artifactId>
<version>1.0.1</version>
<executions>
<execution>
<id>validate-ALL-xml-and-xsl</id>
<phase>process-test-resources</phase>
<goals>
<goal>validate</goal>
</goals>
</execution>
<execution>
<id>validate-ALL-xml-and-xsl</id>
<phase>process-test-resources</phase>
<goals>
<goal>validate</goal>
</goals>
</execution>
</executions>
<configuration>
<validationSets>
<!-- validate ALL XML and XSL config files in the testing folder -->
<validationSet>
<dir>${agnostic.build.dir}/testing</dir>
<includes>
<include>**/*.xml</include>
<include>**/*.xsl</include>
<include>**/*.xconf</include>
</includes>
</validationSet>
<!-- validate ALL XML and XSL files throughout the project -->
<validationSet>
<dir>${root.basedir}</dir>
<includes>
<include>**/*.xml</include>
<include>**/*.xsl</include>
<include>**/*.xmap</include>
</includes>
</validationSet>
</validationSets>
<validationSets>
<!-- validate ALL XML and XSL config files in the testing folder -->
<validationSet>
<dir>${agnostic.build.dir}/testing</dir>
<includes>
<include>**/*.xml</include>
<include>**/*.xsl</include>
<include>**/*.xconf</include>
</includes>
</validationSet>
<!-- validate ALL XML and XSL files throughout the project -->
<validationSet>
<dir>${root.basedir}</dir>
<includes>
<include>**/*.xml</include>
<include>**/*.xsl</include>
<include>**/*.xmap</include>
</includes>
<excludes>
<exclude>**/node/node_modules/**</exclude>
</excludes>
</validationSet>
</validationSets>
</configuration>
</plugin>
<!-- Run Integration Testing! This plugin just kicks off the tests (when enabled). -->
<plugin>
<artifactId>maven-failsafe-plugin</artifactId>
@@ -292,6 +294,7 @@
</profile>
</profiles>
<dependencies>
<dependency>
<groupId>org.hibernate</groupId>
@@ -304,24 +307,13 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-validator-cdi</artifactId>
<version>${hibernate-validator.version}</version>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-ehcache</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-orm</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish</groupId>
<artifactId>javax.el</artifactId>
<version>3.0.1-b10</version>
</dependency>
<dependency>
<groupId>org.dspace</groupId>
<artifactId>handle</artifactId>
@@ -365,10 +357,6 @@
<groupId>commons-collections</groupId>
<artifactId>commons-collections</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-collections4</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-dbcp2</artifactId>
@@ -430,8 +418,8 @@
<artifactId>pdfbox</artifactId>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId>
<groupId>org.apache.pdfbox</groupId>
<artifactId>fontbox</artifactId>
</dependency>
<dependency>
<groupId>org.bouncycastle</groupId>
@@ -449,6 +437,10 @@
<groupId>org.apache.poi</groupId>
<artifactId>poi-scratchpad</artifactId>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
</dependency>
<dependency>
<groupId>rome</groupId>
<artifactId>rome</artifactId>
@@ -511,12 +503,16 @@
<artifactId>h2</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.databene</groupId>
<artifactId>contiperf</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rometools</groupId>
<artifactId>rome-modules</artifactId>
@@ -580,9 +576,9 @@
<artifactId>commons-configuration</artifactId>
</dependency>
<dependency>
<groupId>com.maxmind.geoip2</groupId>
<artifactId>geoip2</artifactId>
<version>2.11.0</version>
<groupId>com.maxmind.geoip</groupId>
<artifactId>geoip-api</artifactId>
<version>1.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.ant</groupId>
@@ -595,9 +591,9 @@
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-core</artifactId>
<version>4.10.4</version>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>1.4.0</version>
</dependency>
<dependency>
@@ -683,6 +679,7 @@
<dependency>
<groupId>joda-time</groupId>
<artifactId>joda-time</artifactId>
<version>2.9.2</version>
</dependency>
<dependency>
<groupId>javax.inject</groupId>
@@ -707,7 +704,7 @@
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
<version>${jersey.version}</version>
<version>2.22.1</version>
</dependency>
<!-- S3 -->
<dependency>
@@ -729,10 +726,17 @@
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.7.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.7.0</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-annotations</artifactId>
<version>2.7.0</version>
</dependency>
</dependencies>

View File

@@ -19,145 +19,147 @@ package org.apache.solr.handler.extraction;
/**
* The various Solr Parameters names to use when extracting content.
*
**/
public interface ExtractingParams {
/**
* Map all generated attribute names to field names with lowercase and underscores.
*/
public static final String LOWERNAMES = "lowernames";
/**
* Map all generated attribute names to field names with lowercase and underscores.
*/
public static final String LOWERNAMES = "lowernames";
/**
* if true, ignore TikaException (give up to extract text but index meta data)
*/
public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";
/**
* if true, ignore TikaException (give up to extract text but index meta data)
*/
public static final String IGNORE_TIKA_EXCEPTION = "ignoreTikaException";
/**
* The param prefix for mapping Tika metadata to Solr fields.
* <p>
* To map a field, add a name like:
* <pre>fmap.title=solr.title</pre>
*
* In this example, the tika "title" metadata value will be added to a Solr field named "solr.title"
*/
public static final String MAP_PREFIX = "fmap.";
/**
* The param prefix for mapping Tika metadata to Solr fields.
* <p>
* To map a field, add a name like:
* <pre>fmap.title=solr.title</pre>
*
* In this example, the tika "title" metadata value will be added to a Solr field named "solr.title"
*
*
*/
public static final String MAP_PREFIX = "fmap.";
/**
* The boost value for the name of the field. The boost can be specified by a name mapping.
* <p>
* For example
* <pre>
* map.title=solr.title
* boost.solr.title=2.5
* </pre>
* will boost the solr.title field for this document by 2.5
*/
public static final String BOOST_PREFIX = "boost.";
/**
* The boost value for the name of the field. The boost can be specified by a name mapping.
* <p>
* For example
* <pre>
* map.title=solr.title
* boost.solr.title=2.5
* </pre>
* will boost the solr.title field for this document by 2.5
*
*/
public static final String BOOST_PREFIX = "boost.";
/**
* Pass in literal values to be added to the document, as in
* <pre>
* literal.myField=Foo
* </pre>
*/
public static final String LITERALS_PREFIX = "literal.";
/**
* Pass in literal values to be added to the document, as in
* <pre>
* literal.myField=Foo
* </pre>
*
*/
public static final String LITERALS_PREFIX = "literal.";
/**
* Restrict the extracted parts of a document to be indexed
* by passing in an XPath expression. All content that satisfies the XPath expr.
* will be passed to the {@link org.apache.solr.handler.extraction.SolrContentHandler}.
* <p>
* See Tika's docs for what the extracted document looks like.
*
* @see #CAPTURE_ELEMENTS
*/
public static final String XPATH_EXPRESSION = "xpath";
/**
* Restrict the extracted parts of a document to be indexed
* by passing in an XPath expression. All content that satisfies the XPath expr.
* will be passed to the {@link SolrContentHandler}.
* <p>
* See Tika's docs for what the extracted document looks like.
* <p>
* @see #CAPTURE_ELEMENTS
*/
public static final String XPATH_EXPRESSION = "xpath";
/**
* Only extract and return the content, do not index it.
*/
public static final String EXTRACT_ONLY = "extractOnly";
/**
* Only extract and return the content, do not index it.
*/
public static final String EXTRACT_ONLY = "extractOnly";
/**
* Content output format if extractOnly is true. Default is "xml", alternative is "text".
*/
public static final String EXTRACT_FORMAT = "extractFormat";
/**
* Content output format if extractOnly is true. Default is "xml", alternative is "text".
*/
public static final String EXTRACT_FORMAT = "extractFormat";
/**
* Capture attributes separately according to the name of the element, instead of just adding them to the string
* buffer
*/
public static final String CAPTURE_ATTRIBUTES = "captureAttr";
/**
* Capture attributes separately according to the name of the element, instead of just adding them to the string buffer
*/
public static final String CAPTURE_ATTRIBUTES = "captureAttr";
/**
* Literal field values will by default override other values such as metadata and content. Set this to false to
* revert to pre-4.0 behaviour
*/
public static final String LITERALS_OVERRIDE = "literalsOverride";
/**
* Literal field values will by default override other values such as metadata and content. Set this to false to revert to pre-4.0 behaviour
*/
public static final String LITERALS_OVERRIDE = "literalsOverride";
/**
* Capture the specified fields (and everything included below it that isn't capture by some other capture field)
* separately from the default. This is different
* then the case of passing in an XPath expression.
* <p>
* The Capture field is based on the localName returned to the
* {@link org.apache.solr.handler.extraction.SolrContentHandler}
* by Tika, not to be confused by the mapped field. The field name can then
* be mapped into the index schema.
* <p>
* For instance, a Tika document may look like:
* <pre>
* &lt;html&gt;
* ...
* &lt;body&gt;
* &lt;p&gt;some text here. &lt;div&gt;more text&lt;/div&gt;&lt;/p&gt;
* Some more text
* &lt;/body&gt;
* </pre>
* By passing in the p tag, you could capture all P tags separately from the rest of the t
* Thus, in the example, the capture of the P tag would be: "some text here. more text"
*/
public static final String CAPTURE_ELEMENTS = "capture";
/**
* Capture the specified fields (and everything included below it that isn't capture by some other capture field) separately from the default. This is different
* then the case of passing in an XPath expression.
* <p>
* The Capture field is based on the localName returned to the {@link SolrContentHandler}
* by Tika, not to be confused by the mapped field. The field name can then
* be mapped into the index schema.
* <p>
* For instance, a Tika document may look like:
* <pre>
* &lt;html&gt;
* ...
* &lt;body&gt;
* &lt;p&gt;some text here. &lt;div&gt;more text&lt;/div&gt;&lt;/p&gt;
* Some more text
* &lt;/body&gt;
* </pre>
* By passing in the p tag, you could capture all P tags separately from the rest of the t
* Thus, in the example, the capture of the P tag would be: "some text here. more text"
*
*/
public static final String CAPTURE_ELEMENTS = "capture";
/**
* The type of the stream. If not specified, Tika will use mime type detection.
*/
public static final String STREAM_TYPE = "stream.type";
/**
* The type of the stream. If not specified, Tika will use mime type detection.
*/
public static final String STREAM_TYPE = "stream.type";
/**
* Optional. The file name. If specified, Tika can take this into account while
* guessing the MIME type.
*/
public static final String RESOURCE_NAME = "resource.name";
/**
* Optional. The file name. If specified, Tika can take this into account while
* guessing the MIME type.
*/
public static final String RESOURCE_NAME = "resource.name";
/**
* Optional. The password for this resource. Will be used instead of the rule based password lookup mechanisms
*/
public static final String RESOURCE_PASSWORD = "resource.password";
/**
* Optional. The password for this resource. Will be used instead of the rule based password lookup mechanisms
*/
public static final String RESOURCE_PASSWORD = "resource.password";
/**
* Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
* to setup a dynamic field to automatically capture it
*/
public static final String UNKNOWN_FIELD_PREFIX = "uprefix";
/**
* Optional. If specified, the prefix will be prepended to all Metadata, such that it would be possible
* to setup a dynamic field to automatically capture it
*/
public static final String UNKNOWN_FIELD_PREFIX = "uprefix";
/**
* Optional. If specified and the name of a potential field cannot be determined, the default Field specified
* will be used instead.
*/
public static final String DEFAULT_FIELD = "defaultField";
/**
* Optional. If specified and the name of a potential field cannot be determined, the default Field specified
* will be used instead.
*/
public static final String DEFAULT_FIELD = "defaultField";
/**
* Optional. If specified, loads the file as a source for password lookups for Tika encrypted documents.
* <p>
* File format is Java properties format with one key=value per line.
* The key is evaluated as a regex against the file name, and the value is the password
* The rules are evaluated top-bottom, i.e. the first match will be used
* If you want a fallback password to be always used, supply a .*=&lt;defaultmypassword&gt; at the end
*/
public static final String PASSWORD_MAP_FILE = "passwordsFile";
/**
* Optional. If specified, loads the file as a source for password lookups for Tika encrypted documents.
* <p>
* File format is Java properties format with one key=value per line.
* The key is evaluated as a regex against the file name, and the value is the password
* The rules are evaluated top-bottom, i.e. the first match will be used
* If you want a fallback password to be always used, supply a .*=&lt;defaultmypassword&gt; at the end
*/
public static final String PASSWORD_MAP_FILE = "passwordsFile";
}

View File

@@ -35,7 +35,8 @@ import org.dspace.handle.service.HandleService;
* @version $Revision$
*/
public class CommunityFiliator {
public class CommunityFiliator
{
protected CommunityService communityService;
protected HandleService handleService;
@@ -46,10 +47,12 @@ public class CommunityFiliator {
}
/**
* @param argv the command line arguments given
*
* @param argv arguments
* @throws Exception if error
*/
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
CommandLineParser parser = new PosixParser();
@@ -57,11 +60,11 @@ public class CommunityFiliator {
options.addOption("s", "set", false, "set a parent/child relationship");
options.addOption("r", "remove", false,
"remove a parent/child relationship");
"remove a parent/child relationship");
options.addOption("p", "parent", true,
"parent community (handle or database ID)");
"parent community (handle or database ID)");
options.addOption("c", "child", true,
"child community (handle or databaseID)");
"child community (handle or databaseID)");
options.addOption("h", "help", false, "help");
CommandLine line = parser.parse(options, argv);
@@ -70,48 +73,57 @@ public class CommunityFiliator {
String parentID = null;
String childID = null;
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("CommunityFiliator\n", options);
System.out
.println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID");
.println("\nestablish a relationship: CommunityFiliator -s -p parentID -c childID");
System.out
.println("remove a relationship: CommunityFiliator -r -p parentID -c childID");
.println("remove a relationship: CommunityFiliator -r -p parentID -c childID");
System.exit(0);
}
if (line.hasOption('s')) {
if (line.hasOption('s'))
{
command = "set";
}
if (line.hasOption('r')) {
if (line.hasOption('r'))
{
command = "remove";
}
if (line.hasOption('p')) { // parent
if (line.hasOption('p')) // parent
{
parentID = line.getOptionValue('p');
}
if (line.hasOption('c')) { // child
if (line.hasOption('c')) // child
{
childID = line.getOptionValue('c');
}
// now validate
// must have a command set
if (command == null) {
if (command == null)
{
System.out
.println("Error - must run with either set or remove (run with -h flag for details)");
.println("Error - must run with either set or remove (run with -h flag for details)");
System.exit(1);
}
if ("set".equals(command) || "remove".equals(command)) {
if (parentID == null) {
if ("set".equals(command) || "remove".equals(command))
{
if (parentID == null)
{
System.out.println("Error - a parentID must be specified (run with -h flag for details)");
System.exit(1);
}
if (childID == null) {
if (childID == null)
{
System.out.println("Error - a childID must be specified (run with -h flag for details)");
System.exit(1);
}
@@ -123,57 +135,71 @@ public class CommunityFiliator {
// we are superuser!
c.turnOffAuthorisationSystem();
try {
try
{
// validate and resolve the parent and child IDs into commmunities
Community parent = filiator.resolveCommunity(c, parentID);
Community child = filiator.resolveCommunity(c, childID);
if (parent == null) {
if (parent == null)
{
System.out.println("Error, parent community cannot be found: "
+ parentID);
+ parentID);
System.exit(1);
}
if (child == null) {
if (child == null)
{
System.out.println("Error, child community cannot be found: "
+ childID);
+ childID);
System.exit(1);
}
if ("set".equals(command)) {
if ("set".equals(command))
{
filiator.filiate(c, parent, child);
} else {
}
else
{
filiator.defiliate(c, parent, child);
}
} catch (SQLException sqlE) {
}
catch (SQLException sqlE)
{
System.out.println("Error - SQL exception: " + sqlE.toString());
} catch (AuthorizeException authE) {
}
catch (AuthorizeException authE)
{
System.out.println("Error - Authorize exception: "
+ authE.toString());
} catch (IOException ioE) {
+ authE.toString());
}
catch (IOException ioE)
{
System.out.println("Error - IO exception: " + ioE.toString());
}
}
/**
* @param c context
*
* @param c context
* @param parent parent Community
* @param child child community
* @throws SQLException if database error
* @param child child community
* @throws SQLException if database error
* @throws AuthorizeException if authorize error
* @throws IOException if IO error
* @throws IOException if IO error
*/
public void filiate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException {
throws SQLException, AuthorizeException, IOException
{
// check that a valid filiation would be established
// first test - proposed child must currently be an orphan (i.e.
// top-level)
Community childDad = CollectionUtils.isNotEmpty(child.getParentCommunities()) ? child.getParentCommunities()
.iterator().next() : null;
Community childDad = CollectionUtils.isNotEmpty(child.getParentCommunities()) ? child.getParentCommunities().iterator().next() : null;
if (childDad != null) {
if (childDad != null)
{
System.out.println("Error, child community: " + child.getID()
+ " already a child of: " + childDad.getID());
+ " already a child of: " + childDad.getID());
System.exit(1);
}
@@ -181,10 +207,12 @@ public class CommunityFiliator {
// child
List<Community> parentDads = parent.getParentCommunities();
for (int i = 0; i < parentDads.size(); i++) {
if (parentDads.get(i).getID().equals(child.getID())) {
for (int i = 0; i < parentDads.size(); i++)
{
if (parentDads.get(i).getID().equals(child.getID()))
{
System.out
.println("Error, circular parentage - child is parent of parent");
.println("Error, circular parentage - child is parent of parent");
System.exit(1);
}
}
@@ -195,34 +223,39 @@ public class CommunityFiliator {
// complete the pending transaction
c.complete();
System.out.println("Filiation complete. Community: '" + parent.getID()
+ "' is parent of community: '" + child.getID() + "'");
+ "' is parent of community: '" + child.getID() + "'");
}
/**
* @param c context
*
* @param c context
* @param parent parent Community
* @param child child community
* @throws SQLException if database error
* @param child child community
* @throws SQLException if database error
* @throws AuthorizeException if authorize error
* @throws IOException if IO error
* @throws IOException if IO error
*/
public void defiliate(Context c, Community parent, Community child)
throws SQLException, AuthorizeException, IOException {
throws SQLException, AuthorizeException, IOException
{
// verify that child is indeed a child of parent
List<Community> parentKids = parent.getSubcommunities();
boolean isChild = false;
for (int i = 0; i < parentKids.size(); i++) {
if (parentKids.get(i).getID().equals(child.getID())) {
for (int i = 0; i < parentKids.size(); i++)
{
if (parentKids.get(i).getID().equals(child.getID()))
{
isChild = true;
break;
}
}
if (!isChild) {
if (!isChild)
{
System.out
.println("Error, child community not a child of parent community");
.println("Error, child community not a child of parent community");
System.exit(1);
}
@@ -236,33 +269,37 @@ public class CommunityFiliator {
// complete the pending transaction
c.complete();
System.out.println("Defiliation complete. Community: '" + child.getID()
+ "' is no longer a child of community: '" + parent.getID()
+ "'");
+ "' is no longer a child of community: '" + parent.getID()
+ "'");
}
/**
* Find a community by ID
*
* @param c context
* @param c context
* @param communityID community ID
* @return Community object
* @throws SQLException if database error
*/
protected Community resolveCommunity(Context c, String communityID)
throws SQLException {
throws SQLException
{
Community community = null;
if (communityID.indexOf('/') != -1) {
if (communityID.indexOf('/') != -1)
{
// has a / must be a handle
community = (Community) handleService.resolveToObject(c,
communityID);
communityID);
// ensure it's a community
if ((community == null)
|| (community.getType() != Constants.COMMUNITY)) {
|| (community.getType() != Constants.COMMUNITY))
{
community = null;
}
} else {
}
else
{
community = communityService.find(c, UUID.fromString(communityID));
}

View File

@@ -15,6 +15,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
@@ -41,13 +42,13 @@ import org.dspace.eperson.service.GroupService;
*
* @author Robert Tansley
* @author Richard Jones
*
* @version $Revision$
*/
public final class CreateAdministrator {
/**
* DSpace Context object
*/
private final Context context;
public final class CreateAdministrator
{
/** DSpace Context object */
private final Context context;
protected EPersonService ePersonService;
protected GroupService groupService;
@@ -56,32 +57,37 @@ public final class CreateAdministrator {
* For invoking via the command line. If called with no command line arguments,
* it will negotiate with the user for the administrator details
*
* @param argv the command line arguments given
* @param argv
* command-line arguments
* @throws Exception if error
*/
public static void main(String[] argv)
throws Exception {
CommandLineParser parser = new PosixParser();
Options options = new Options();
throws Exception
{
CommandLineParser parser = new PosixParser();
Options options = new Options();
CreateAdministrator ca = new CreateAdministrator();
CreateAdministrator ca = new CreateAdministrator();
options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name");
options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password");
options.addOption("e", "email", true, "administrator email address");
options.addOption("f", "first", true, "administrator first name");
options.addOption("l", "last", true, "administrator last name");
options.addOption("c", "language", true, "administrator language");
options.addOption("p", "password", true, "administrator password");
CommandLine line = parser.parse(options, argv);
CommandLine line = parser.parse(options, argv);
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p")) {
ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
} else {
ca.negotiateAdministratorDetails();
}
if (line.hasOption("e") && line.hasOption("f") && line.hasOption("l") &&
line.hasOption("c") && line.hasOption("p"))
{
ca.createAdministrator(line.getOptionValue("e"),
line.getOptionValue("f"), line.getOptionValue("l"),
line.getOptionValue("c"), line.getOptionValue("p"));
}
else
{
ca.negotiateAdministratorDetails();
}
}
/**
@@ -90,8 +96,9 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected CreateAdministrator()
throws Exception {
context = new Context();
throws Exception
{
context = new Context();
groupService = EPersonServiceFactory.getInstance().getGroupService();
ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
}
@@ -103,96 +110,109 @@ public final class CreateAdministrator {
* @throws Exception if error
*/
protected void negotiateAdministratorDetails()
throws Exception {
throws Exception
{
Console console = System.console();
System.out.println("Creating an initial administrator account");
System.out.println("Creating an initial administrator account");
boolean dataOK = false;
boolean dataOK = false;
String email = null;
String firstName = null;
String lastName = null;
String email = null;
String firstName = null;
String lastName = null;
char[] password1 = null;
char[] password2 = null;
String language = I18nUtil.DEFAULTLOCALE.getLanguage();
String language = I18nUtil.DEFAULTLOCALE.getLanguage();
while (!dataOK) {
System.out.print("E-mail address: ");
System.out.flush();
while (!dataOK)
{
System.out.print("E-mail address: ");
System.out.flush();
email = console.readLine();
if (!StringUtils.isBlank(email)) {
email = console.readLine();
if (!StringUtils.isBlank(email))
{
email = email.trim();
} else {
}
else
{
System.out.println("Please provide an email address.");
continue;
}
System.out.print("First name: ");
System.out.flush();
System.out.print("First name: ");
System.out.flush();
firstName = console.readLine();
firstName = console.readLine();
if (firstName != null) {
if (firstName != null)
{
firstName = firstName.trim();
}
System.out.print("Last name: ");
System.out.flush();
System.out.print("Last name: ");
System.out.flush();
lastName = console.readLine();
lastName = console.readLine();
if (lastName != null) {
if (lastName != null)
{
lastName = lastName.trim();
}
if (ConfigurationManager.getProperty("webui.supported.locales") != null) {
System.out.println("Select one of the following languages: " + ConfigurationManager
.getProperty("webui.supported.locales"));
if (ConfigurationManager.getProperty("webui.supported.locales") != null)
{
System.out.println("Select one of the following languages: " + ConfigurationManager.getProperty("webui.supported.locales"));
System.out.print("Language: ");
System.out.flush();
language = console.readLine();
language = console.readLine();
if (language != null) {
if (language != null)
{
language = language.trim();
language = I18nUtil.getSupportedLocale(new Locale(language)).getLanguage();
}
}
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.flush();
System.out.println("Password will not display on screen.");
System.out.print("Password: ");
System.out.flush();
password1 = console.readPassword();
password1 = console.readPassword();
System.out.print("Again to confirm: ");
System.out.flush();
System.out.print("Again to confirm: ");
System.out.flush();
password2 = console.readPassword();
password2 = console.readPassword();
//TODO real password validation
if (password1.length > 1 && Arrays.equals(password1, password2)) {
// password OK
System.out.print("Is the above data correct? (y or n): ");
System.out.flush();
if (password1.length > 1 && Arrays.equals(password1, password2))
{
// password OK
System.out.print("Is the above data correct? (y or n): ");
System.out.flush();
String s = console.readLine();
String s = console.readLine();
if (s != null) {
if (s != null)
{
s = s.trim();
if (s.toLowerCase().startsWith("y")) {
if (s.toLowerCase().startsWith("y"))
{
dataOK = true;
}
}
} else {
System.out.println("Passwords don't match");
}
}
}
else
{
System.out.println("Passwords don't match");
}
}
// if we make it to here, we are ready to create an administrator
createAdministrator(email, firstName, lastName, language, String.valueOf(password1));
// if we make it to here, we are ready to create an administrator
createAdministrator(email, firstName, lastName, language, String.valueOf(password1));
//Cleaning arrays that held password
Arrays.fill(password1, ' ');
@@ -203,33 +223,37 @@ public final class CreateAdministrator {
* Create the administrator with the given details. If the user
* already exists then they are simply upped to administrator status
*
* @param email the email for the user
* @param first user's first name
* @param last user's last name
* @param email the email for the user
* @param first user's first name
* @param last user's last name
* @param language preferred language
* @param pw desired password
* @param pw desired password
*
* @throws Exception if error
*/
protected void createAdministrator(String email, String first, String last,
String language, String pw)
throws Exception {
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.turnOffAuthorisationSystem();
String language, String pw)
throws Exception
{
// Of course we aren't an administrator yet so we need to
// circumvent authorisation
context.turnOffAuthorisationSystem();
// Find administrator group
Group admins = groupService.findByName(context, Group.ADMIN);
// Find administrator group
Group admins = groupService.findByName(context, Group.ADMIN);
if (admins == null) {
throw new IllegalStateException("Error, no admin group (group 1) found");
}
if (admins == null)
{
throw new IllegalStateException("Error, no admin group (group 1) found");
}
// Create the administrator e-person
EPerson eperson = ePersonService.findByEmail(context, email);
// Create the administrator e-person
EPerson eperson = ePersonService.findByEmail(context,email);
// check if the email belongs to a registered user,
// if not create a new user with this email
if (eperson == null) {
if (eperson == null)
{
eperson = ePersonService.create(context);
eperson.setEmail(email);
eperson.setCanLogIn(true);
@@ -237,17 +261,17 @@ public final class CreateAdministrator {
eperson.setSelfRegistered(false);
}
eperson.setLastName(context, last);
eperson.setFirstName(context, first);
eperson.setLanguage(context, language);
eperson.setLastName(context, last);
eperson.setFirstName(context, first);
eperson.setLanguage(context, language);
ePersonService.setPassword(eperson, pw);
ePersonService.update(context, eperson);
groupService.addMember(context, admins, eperson);
groupService.addMember(context, admins, eperson);
groupService.update(context, admins);
context.complete();
context.complete();
System.out.println("Administrator account created");
System.out.println("Administrator account created");
}
}

View File

@@ -7,19 +7,7 @@
*/
package org.dspace.administer;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.xml.serialize.Method;
import org.apache.xml.serialize.OutputFormat;
import org.apache.xml.serialize.XMLSerializer;
@@ -31,6 +19,14 @@ import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.xml.sax.SAXException;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Graham Triggs
@@ -41,53 +37,51 @@ import org.xml.sax.SAXException;
* The form of the XML is as follows
* {@code
* <metadata-schemas>
* <schema>
* <name>dc</name>
* <namespace>http://dublincore.org/documents/dcmi-terms/</namespace>
* </schema>
* <schema>
* <name>dc</name>
* <namespace>http://dublincore.org/documents/dcmi-terms/</namespace>
* </schema>
* </metadata-schemas>
* }
*/
public class MetadataExporter {
public class MetadataExporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
.getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
.getMetadataFieldService();
/**
* Default constructor
*/
private MetadataExporter() { }
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* @param args commandline arguments
* @throws ParseException if parser error
* @throws SAXException if XML parse error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws ParseException if parser error
* @throws SAXException if XML parse error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws RegistryExportException if export error
*/
public static void main(String[] args)
throws ParseException, SQLException, IOException, SAXException, RegistryExportException {
public static void main(String[] args) throws ParseException, SQLException, IOException, SAXException, RegistryExportException
{
// create an options object and populate it
CommandLineParser parser = new PosixParser();
Options options = new Options();
options.addOption("f", "file", true, "output xml file for registry");
options.addOption("f", "file", true, "output xml file for registry");
options.addOption("s", "schema", true, "the name of the schema to export");
CommandLine line = parser.parse(options, args);
String file = null;
String file = null;
String schema = null;
if (line.hasOption('f')) {
file = line.getOptionValue('f');
} else {
if (line.hasOption('f'))
{
file = line.getOptionValue('f');
}
else
{
usage();
System.exit(0);
}
if (line.hasOption('s')) {
if (line.hasOption('s'))
{
schema = line.getOptionValue('s');
}
@@ -96,16 +90,15 @@ public class MetadataExporter {
/**
* Save a registry to a filepath
*
* @param file filepath
* @param file filepath
* @param schema schema definition to save
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SAXException if XML error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SAXException if XML error
* @throws RegistryExportException if export error
*/
public static void saveRegistry(String file, String schema)
throws SQLException, IOException, SAXException, RegistryExportException {
public static void saveRegistry(String file, String schema) throws SQLException, IOException, SAXException, RegistryExportException
{
// create a context
Context context = new Context();
context.turnOffAuthorisationSystem();
@@ -125,22 +118,27 @@ public class MetadataExporter {
List<MetadataField> mdFields = null;
// If a single schema has been specified
if (schema != null && !"".equals(schema)) {
if (schema != null && !"".equals(schema))
{
// Get the id of that schema
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
if (mdSchema == null) {
if (mdSchema == null)
{
throw new RegistryExportException("no schema to export");
}
// Get the metadata fields only for the specified schema
mdFields = metadataFieldService.findAllInSchema(context, mdSchema);
} else {
}
else
{
// Get the metadata fields for all the schemas
mdFields = metadataFieldService.findAll(context);
}
// Output the metadata fields
for (MetadataField mdField : mdFields) {
for (MetadataField mdField : mdFields)
{
saveType(context, xmlSerializer, mdField);
}
@@ -153,26 +151,29 @@ public class MetadataExporter {
/**
* Serialize the schema registry. If the parameter 'schema' is null or empty, save all schemas
*
* @param context DSpace Context
* @param context DSpace Context
* @param xmlSerializer XML serializer
* @param schema schema (may be null to save all)
* @throws SQLException if database error
* @throws SAXException if XML error
* @param schema schema (may be null to save all)
* @throws SQLException if database error
* @throws SAXException if XML error
* @throws RegistryExportException if export error
*/
public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema)
throws SQLException, SAXException, RegistryExportException {
if (schema != null && !"".equals(schema)) {
public static void saveSchema(Context context, XMLSerializer xmlSerializer, String schema) throws SQLException, SAXException, RegistryExportException
{
if (schema != null && !"".equals(schema))
{
// Find a single named schema
MetadataSchema mdSchema = metadataSchemaService.find(context, schema);
saveSchema(xmlSerializer, mdSchema);
} else {
}
else
{
// Find all schemas
List<MetadataSchema> mdSchemas = metadataSchemaService.findAll(context);
for (MetadataSchema mdSchema : mdSchemas) {
for (MetadataSchema mdSchema : mdSchemas)
{
saveSchema(xmlSerializer, mdSchema);
}
}
@@ -182,26 +183,29 @@ public class MetadataExporter {
* Serialize a single schema (namespace) registry entry
*
* @param xmlSerializer XML serializer
* @param mdSchema DSpace metadata schema
* @throws SAXException if XML error
* @param mdSchema DSpace metadata schema
* @throws SAXException if XML error
* @throws RegistryExportException if export error
*/
private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema)
throws SAXException, RegistryExportException {
private static void saveSchema(XMLSerializer xmlSerializer, MetadataSchema mdSchema) throws SAXException, RegistryExportException
{
// If we haven't got a schema, it's an error
if (mdSchema == null) {
if (mdSchema == null)
{
throw new RegistryExportException("no schema to export");
}
String name = mdSchema.getName();
String name = mdSchema.getName();
String namespace = mdSchema.getNamespace();
if (name == null || "".equals(name)) {
if (name == null || "".equals(name))
{
System.out.println("name is null, skipping");
return;
}
if (namespace == null || "".equals(namespace)) {
if (namespace == null || "".equals(namespace))
{
System.out.println("namespace is null, skipping");
return;
}
@@ -225,18 +229,19 @@ public class MetadataExporter {
/**
* Serialize a single metadata field registry entry to xml
*
* @param context DSpace context
* @param context DSpace context
* @param xmlSerializer xml serializer
* @param mdField DSpace metadata field
* @throws SAXException if XML error
* @param mdField DSpace metadata field
* @throws SAXException if XML error
* @throws RegistryExportException if export error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws IOException if IO error
*/
private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField)
throws SAXException, RegistryExportException, SQLException, IOException {
private static void saveType(Context context, XMLSerializer xmlSerializer, MetadataField mdField) throws SAXException, RegistryExportException, SQLException, IOException
{
// If we haven't been given a field, it's an error
if (mdField == null) {
if (mdField == null)
{
throw new RegistryExportException("no field to export");
}
@@ -247,7 +252,8 @@ public class MetadataExporter {
String scopeNote = mdField.getScopeNote();
// We must have a schema and element
if (schemaName == null || element == null) {
if (schemaName == null || element == null)
{
throw new RegistryExportException("incomplete field information");
}
@@ -265,20 +271,26 @@ public class MetadataExporter {
xmlSerializer.endElement("element");
// Output the qualifier, if present
if (qualifier != null) {
if (qualifier != null)
{
xmlSerializer.startElement("qualifier", null);
xmlSerializer.characters(qualifier.toCharArray(), 0, qualifier.length());
xmlSerializer.endElement("qualifier");
} else {
}
else
{
xmlSerializer.comment("unqualified");
}
// Output the scope note, if present
if (scopeNote != null) {
if (scopeNote != null)
{
xmlSerializer.startElement("scope_note", null);
xmlSerializer.characters(scopeNote.toCharArray(), 0, scopeNote.length());
xmlSerializer.endElement("scope_note");
} else {
}
else
{
xmlSerializer.comment("no scope note");
}
@@ -286,29 +298,31 @@ public class MetadataExporter {
}
static Map<Integer, String> schemaMap = new HashMap<Integer, String>();
/**
* Helper method to retrieve a schema name for the field.
* Caches the name after looking up the id.
*
* @param context DSpace Context
* @param mdField DSpace metadata field
* @return name of schema
* @throws SQLException if database error
* @throws SQLException if database error
* @throws RegistryExportException if export error
*/
private static String getSchemaName(Context context, MetadataField mdField)
throws SQLException, RegistryExportException {
private static String getSchemaName(Context context, MetadataField mdField) throws SQLException, RegistryExportException
{
// Get name from cache
String name = schemaMap.get(mdField.getMetadataSchema().getID());
if (name == null) {
if (name == null)
{
// Name not retrieved before, so get the schema now
MetadataSchema mdSchema = metadataSchemaService.find(context, mdField.getMetadataSchema().getID());
if (mdSchema != null) {
if (mdSchema != null)
{
name = mdSchema.getName();
schemaMap.put(mdSchema.getID(), name);
} else {
}
else
{
// Can't find the schema
throw new RegistryExportException("Can't get schema name for field");
}
@@ -319,10 +333,11 @@ public class MetadataExporter {
/**
* Print the usage message to stdout
*/
public static void usage() {
public static void usage()
{
String usage = "Use this class with the following options:\n" +
" -f <xml output file> : specify the output file for the schemas\n" +
" -s <schema> : name of the schema to export\n";
" -f <xml output file> : specify the output file for the schemas\n" +
" -s <schema> : name of the schema to export\n";
System.out.println(usage);
}
}

View File

@@ -9,6 +9,7 @@ package org.dspace.administer;
import java.io.IOException;
import java.sql.SQLException;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
@@ -17,7 +18,9 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
@@ -28,9 +31,11 @@ import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
@@ -44,52 +49,44 @@ import org.xml.sax.SAXException;
*
* {@code
* <dspace-dc-types>
* <dc-type>
* <schema>icadmin</schema>
* <element>status</element>
* <qualifier>dateset</qualifier>
* <scope_note>the workflow status of an item</scope_note>
* </dc-type>
* <dc-type>
* <schema>icadmin</schema>
* <element>status</element>
* <qualifier>dateset</qualifier>
* <scope_note>the workflow status of an item</scope_note>
* </dc-type>
*
* [....]
* [....]
*
* </dspace-dc-types>
* }
*/
public class MetadataImporter {
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
.getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance()
.getMetadataFieldService();
public class MetadataImporter
{
protected static MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected static MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* logging category
*/
/** logging category */
private static final Logger log = LoggerFactory.getLogger(MetadataImporter.class);
/**
* Default constructor
*/
private MetadataImporter() { }
/**
* main method for reading user input from the command line
*
* @param args the command line arguments given
* @throws ParseException if parse error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @param args arguments
* @throws ParseException if parse error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws ParserConfigurationException if config error
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
**/
public static void main(String[] args)
throws ParseException, SQLException, IOException, TransformerException,
ParserConfigurationException, AuthorizeException, SAXException,
NonUniqueMetadataException, RegistryImportException {
throws ParseException, SQLException, IOException, TransformerException,
ParserConfigurationException, AuthorizeException, SAXException,
NonUniqueMetadataException, RegistryImportException
{
boolean forceUpdate = false;
// create an options object and populate it
@@ -100,9 +97,12 @@ public class MetadataImporter {
CommandLine line = parser.parse(options, args);
String file = null;
if (line.hasOption('f')) {
if (line.hasOption('f'))
{
file = line.getOptionValue('f');
} else {
}
else
{
usage();
System.exit(0);
}
@@ -114,23 +114,25 @@ public class MetadataImporter {
/**
* Load the data from the specified file path into the database
*
* @param file the file path containing the source data
* @param forceUpdate whether to force update
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @param file the file path containing the source data
* @param forceUpdate whether to force update
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws ParserConfigurationException if config error
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
*/
public static void loadRegistry(String file, boolean forceUpdate)
throws SQLException, IOException, TransformerException, ParserConfigurationException,
AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, TransformerException, ParserConfigurationException,
AuthorizeException, SAXException, NonUniqueMetadataException, RegistryImportException
{
Context context = null;
try {
try
{
// create a context
context = new Context();
context.turnOffAuthorisationSystem();
@@ -142,7 +144,8 @@ public class MetadataImporter {
NodeList schemaNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-schema");
// Add each one as a new format to the registry
for (int i = 0; i < schemaNodes.getLength(); i++) {
for (int i = 0; i < schemaNodes.getLength(); i++)
{
Node n = schemaNodes.item(i);
loadSchema(context, n, forceUpdate);
}
@@ -151,18 +154,20 @@ public class MetadataImporter {
NodeList typeNodes = XPathAPI.selectNodeList(document, "/dspace-dc-types/dc-type");
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
for (int i = 0; i < typeNodes.getLength(); i++)
{
Node n = typeNodes.item(i);
loadType(context, n);
}
context.restoreAuthSystemState();
context.complete();
} finally {
// Clean up our context, if it still exists & it was never completed
if (context != null && context.isValid()) {
}
finally
{
// Clean up our context, if it still exists & it was never completed
if(context!=null && context.isValid())
context.abort();
}
}
}
@@ -170,54 +175,64 @@ public class MetadataImporter {
* Process a node in the metadata registry XML file. If the
* schema already exists, it will not be recreated
*
* @param context DSpace context object
* @param node the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @param context
* DSpace context object
* @param node
* the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
* @throws RegistryImportException if import fails
*/
private static void loadSchema(Context context, Node node, boolean updateExisting)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
AuthorizeException, NonUniqueMetadataException, RegistryImportException
{
// Get the values
String name = RegistryImporter.getElementData(node, "name");
String namespace = RegistryImporter.getElementData(node, "namespace");
if (name == null || "".equals(name)) {
if (name == null || "".equals(name))
{
throw new RegistryImportException("Name of schema must be supplied");
}
if (namespace == null || "".equals(namespace)) {
if (namespace == null || "".equals(namespace))
{
throw new RegistryImportException("Namespace of schema must be supplied");
}
// check to see if the schema already exists
MetadataSchema s = metadataSchemaService.find(context, name);
if (s == null) {
if (s == null)
{
// Schema does not exist - create
log.info("Registering Schema " + name + " (" + namespace + ")");
metadataSchemaService.create(context, name, namespace);
} else {
}
else
{
// Schema exists - if it's the same namespace, allow the type imports to continue
if (s.getNamespace().equals(namespace)) {
if (s.getNamespace().equals(namespace))
{
// This schema already exists with this namespace, skipping it
return;
}
// It's a different namespace - have we been told to update?
if (updateExisting) {
if (updateExisting)
{
// Update the existing schema namespace and continue to type import
log.info("Updating Schema " + name + ": New namespace " + namespace);
s.setNamespace(namespace);
metadataSchemaService.update(context, s);
} else {
throw new RegistryImportException(
"Schema " + name + " already registered with different namespace " + namespace + ". Rerun with " +
"'update' option enabled if you wish to update this schema.");
}
else
{
throw new RegistryImportException("Schema " + name + " already registered with different namespace " + namespace + ". Rerun with 'update' option enabled if you wish to update this schema.");
}
}
@@ -228,18 +243,21 @@ public class MetadataImporter {
* be a "dc-type" node. If the type already exists, then it
* will not be reimported
*
* @param context DSpace context object
* @param node the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @param context
* DSpace context object
* @param node
* the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @throws NonUniqueMetadataException if duplicate metadata
* @throws RegistryImportException if import fails
* @throws RegistryImportException if import fails
*/
private static void loadType(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException {
throws SQLException, IOException, TransformerException,
AuthorizeException, NonUniqueMetadataException, RegistryImportException
{
// Get the values
String schema = RegistryImporter.getElementData(node, "schema");
String element = RegistryImporter.getElementData(node, "element");
@@ -247,7 +265,8 @@ public class MetadataImporter {
String scopeNote = RegistryImporter.getElementData(node, "scope_note");
// If the schema is not provided default to DC
if (schema == null) {
if (schema == null)
{
schema = MetadataSchema.DC_SCHEMA;
}
@@ -255,21 +274,22 @@ public class MetadataImporter {
// Find the matching schema object
MetadataSchema schemaObj = metadataSchemaService.find(context, schema);
if (schemaObj == null) {
if (schemaObj == null)
{
throw new RegistryImportException("Schema '" + schema + "' is not registered and does not exist.");
}
MetadataField mf = metadataFieldService.findByElement(context, schemaObj, element, qualifier);
if (mf != null) {
if (mf != null)
{
// Metadata field already exists, skipping it
return;
}
// Actually create this metadata field as it doesn't yet exist
String fieldName = schema + "." + element + "." + qualifier;
if (qualifier == null) {
if(qualifier==null)
fieldName = schema + "." + element;
}
log.info("Registering metadata field " + fieldName);
MetadataField field = metadataFieldService.create(context, schemaObj, element, qualifier, scopeNote);
metadataFieldService.update(context, field);
@@ -278,10 +298,11 @@ public class MetadataImporter {
/**
* Print the usage message to stdout
*/
public static void usage() {
public static void usage()
{
String usage = "Use this class with the following option:\n" +
" -f <xml source file> : specify which xml source file " +
"contains the DC fields to import.\n";
" -f <xml source file> : specify which xml source file " +
"contains the DC fields to import.\n";
System.out.println(usage);
}
}

View File

@@ -12,11 +12,13 @@ package org.dspace.administer;
*
* An exception to report any problems with registry exports
*/
public class RegistryExportException extends Exception {
public class RegistryExportException extends Exception
{
/**
* Create an empty authorize exception
*/
public RegistryExportException() {
public RegistryExportException()
{
super();
}
@@ -25,7 +27,8 @@ public class RegistryExportException extends Exception {
*
* @param message exception message
*/
public RegistryExportException(String message) {
public RegistryExportException(String message)
{
super(message);
}
@@ -33,9 +36,10 @@ public class RegistryExportException extends Exception {
* create an exception with an inner exception and a message
*
* @param message exception message
* @param e reference to Throwable
* @param e reference to Throwable
*/
public RegistryExportException(String message, Throwable e) {
public RegistryExportException(String message, Throwable e)
{
super(message, e);
}
@@ -44,7 +48,8 @@ public class RegistryExportException extends Exception {
*
* @param e reference to Throwable
*/
public RegistryExportException(Throwable e) {
public RegistryExportException(Throwable e)
{
super(e);
}

View File

@@ -12,11 +12,13 @@ package org.dspace.administer;
*
* An exception to report any problems with registry imports
*/
public class RegistryImportException extends Exception {
public class RegistryImportException extends Exception
{
/**
* Create an empty authorize exception
*/
public RegistryImportException() {
public RegistryImportException()
{
super();
}
@@ -25,27 +27,30 @@ public class RegistryImportException extends Exception {
*
* @param message error message
*/
public RegistryImportException(String message) {
public RegistryImportException(String message)
{
super(message);
}
/**
* create an exception with an inner exception and a message
*
* @param message error message
* @param e throwable
* @param message error message
* @param e throwable
*/
public RegistryImportException(String message, Throwable e) {
super(message, e);
public RegistryImportException(String message, Throwable e)
{
super(message, e);
}
/**
* create an exception with an inner exception
*
* @param e throwable
* @param e throwable
*/
public RegistryImportException(Throwable e) {
super(e);
public RegistryImportException(Throwable e)
{
super(e);
}
}

View File

@@ -9,15 +9,18 @@ package org.dspace.administer;
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.xpath.XPathAPI;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
@@ -28,26 +31,24 @@ import org.xml.sax.SAXException;
* I am the author, really I ripped these methods off from other
* classes
*/
public class RegistryImporter {
/**
* Default constructor
*/
private RegistryImporter() { }
public class RegistryImporter
{
/**
* Load in the XML from file.
*
* @param filename the filename to load from
* @param filename
* the filename to load from
*
* @return the DOM representation of the XML file
* @throws IOException if IO error
* @throws IOException if IO error
* @throws ParserConfigurationException if configuration parse error
* @throws SAXException if XML parse error
* @throws SAXException if XML parse error
*/
public static Document loadXML(String filename)
throws IOException, ParserConfigurationException, SAXException {
throws IOException, ParserConfigurationException, SAXException
{
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
.newDocumentBuilder();
Document document = builder.parse(new File(filename));
@@ -66,17 +67,21 @@ public class RegistryImporter {
* </P>
* Why this isn't a core part of the XML API I do not know...
*
* @param parentElement the element, whose child element you want the CDATA from
* @param childName the name of the element you want the CDATA from
* @return the CDATA as a <code>String</code>
* @param parentElement
* the element, whose child element you want the CDATA from
* @param childName
* the name of the element you want the CDATA from
* @throws TransformerException if error
* @return the CDATA as a <code>String</code>
*/
public static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws TransformerException
{
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
if (childNode == null) {
if (childNode == null)
{
// No child node, so no values
return null;
}
@@ -84,7 +89,8 @@ public class RegistryImporter {
// Get the #text
Node dataNode = childNode.getFirstChild();
if (dataNode == null) {
if (dataNode == null)
{
return null;
}
@@ -100,8 +106,8 @@ public class RegistryImporter {
* <P>
* <code>
* &lt;foo&gt;
* &lt;bar&gt;val1&lt;/bar&gt;
* &lt;bar&gt;val2&lt;/bar&gt;
* &lt;bar&gt;val1&lt;/bar&gt;
* &lt;bar&gt;val2&lt;/bar&gt;
* &lt;/foo&gt;
* </code>
* passing this the <code>foo</code> node and <code>bar</code> will
@@ -109,19 +115,23 @@ public class RegistryImporter {
* </P>
* Why this also isn't a core part of the XML API I do not know...
*
* @param parentElement the element, whose child element you want the CDATA from
* @param childName the name of the element you want the CDATA from
* @return the CDATA as a <code>String</code>
* @param parentElement
* the element, whose child element you want the CDATA from
* @param childName
* the name of the element you want the CDATA from
* @throws TransformerException if error
* @return the CDATA as a <code>String</code>
*/
public static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws TransformerException
{
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
String[] data = new String[childNodes.getLength()];
for (int i = 0; i < childNodes.getLength(); i++) {
for (int i = 0; i < childNodes.getLength(); i++)
{
// Get the #text node
Node dataNode = childNodes.item(i).getFirstChild();

View File

@@ -12,6 +12,7 @@ import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
@@ -43,33 +44,29 @@ import org.xml.sax.SAXException;
* @author Robert Tansley
* @version $Revision$
*/
public class RegistryLoader {
/**
* log4j category
*/
public class RegistryLoader
{
/** log4j category */
private static Logger log = Logger.getLogger(RegistryLoader.class);
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
.getBitstreamFormatService();
/**
* Default constructor
*/
private RegistryLoader() { }
protected static BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
/**
* For invoking via the command line
*
* @param argv the command line arguments given
* @param argv
* command-line arguments
* @throws Exception if error
*/
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
String usage = "Usage: " + RegistryLoader.class.getName()
+ " (-bitstream | -metadata) registry-file.xml";
+ " (-bitstream | -metadata) registry-file.xml";
Context context = null;
try {
try
{
context = new Context();
// Can't update registries anonymously, so we need to turn off
@@ -77,12 +74,17 @@ public class RegistryLoader {
context.turnOffAuthorisationSystem();
// Work out what we're loading
if (argv[0].equalsIgnoreCase("-bitstream")) {
if (argv[0].equalsIgnoreCase("-bitstream"))
{
RegistryLoader.loadBitstreamFormats(context, argv[1]);
} else if (argv[0].equalsIgnoreCase("-metadata")) {
}
else if (argv[0].equalsIgnoreCase("-metadata"))
{
// Call MetadataImporter, as it handles Metadata schema updates
MetadataImporter.loadRegistry(argv[1], true);
} else {
}
else
{
System.err.println(usage);
}
@@ -90,69 +92,81 @@ public class RegistryLoader {
context.complete();
System.exit(0);
} catch (ArrayIndexOutOfBoundsException ae) {
}
catch (ArrayIndexOutOfBoundsException ae)
{
System.err.println(usage);
System.exit(1);
} catch (Exception e) {
}
catch (Exception e)
{
log.fatal(LogManager.getHeader(context, "error_loading_registries",
""), e);
""), e);
System.err.println("Error: \n - " + e.getMessage());
System.exit(1);
} finally {
}
finally
{
// Clean up our context, if it still exists & it was never completed
if (context != null && context.isValid()) {
if(context!=null && context.isValid())
context.abort();
}
}
}
/**
* Load Bitstream Format metadata
*
* @param context DSpace context object
* @param filename the filename of the XML file to load
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @param context
* DSpace context object
* @param filename
* the filename of the XML file to load
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws ParserConfigurationException if config error
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
* @throws AuthorizeException if authorization error
* @throws SAXException if parser error
*/
public static void loadBitstreamFormats(Context context, String filename)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException
{
Document document = loadXML(filename);
// Get the nodes corresponding to formats
NodeList typeNodes = XPathAPI.selectNodeList(document,
"dspace-bitstream-types/bitstream-type");
"dspace-bitstream-types/bitstream-type");
// Add each one as a new format to the registry
for (int i = 0; i < typeNodes.getLength(); i++) {
for (int i = 0; i < typeNodes.getLength(); i++)
{
Node n = typeNodes.item(i);
loadFormat(context, n);
}
log.info(LogManager.getHeader(context, "load_bitstream_formats",
"number_loaded=" + typeNodes.getLength()));
"number_loaded=" + typeNodes.getLength()));
}
/**
* Process a node in the bitstream format registry XML file. The node must
* be a "bitstream-type" node
*
* @param context DSpace context object
* @param node the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @param context
* DSpace context object
* @param node
* the node in the DOM tree
* @throws SQLException if database error
* @throws IOException if IO error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @throws AuthorizeException if authorization error
*/
private static void loadFormat(Context context, Node node)
throws SQLException, IOException, TransformerException,
AuthorizeException {
throws SQLException, IOException, TransformerException,
AuthorizeException
{
// Get the values
String mimeType = getElementData(node, "mimetype");
String shortDesc = getElementData(node, "short_description");
@@ -170,12 +184,14 @@ public class RegistryLoader {
BitstreamFormat exists = bitstreamFormatService.findByMIMEType(context, mimeType);
// If not found by mimeType, check by short description (since this must also be unique)
if (exists == null) {
if(exists==null)
{
exists = bitstreamFormatService.findByShortDescription(context, shortDesc);
}
// If it doesn't exist, create it..otherwise skip it.
if (exists == null) {
if(exists==null)
{
// Create the format object
BitstreamFormat format = bitstreamFormatService.create(context);
@@ -199,16 +215,18 @@ public class RegistryLoader {
/**
* Load in the XML from file.
*
* @param filename the filename to load from
* @return the DOM representation of the XML file
* @throws IOException if IO error
* @param filename
* the filename to load from
* @throws IOException if IO error
* @throws ParserConfigurationException if config error
* @throws SAXException if parser error
* @throws SAXException if parser error
* @return the DOM representation of the XML file
*/
private static Document loadXML(String filename) throws IOException,
ParserConfigurationException, SAXException {
ParserConfigurationException, SAXException
{
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
.newDocumentBuilder();
return builder.parse(new File(filename));
}
@@ -225,17 +243,21 @@ public class RegistryLoader {
* </P>
* Why this isn't a core part of the XML API I do not know...
*
* @param parentElement the element, whose child element you want the CDATA from
* @param childName the name of the element you want the CDATA from
* @return the CDATA as a <code>String</code>
* @param parentElement
* the element, whose child element you want the CDATA from
* @param childName
* the name of the element you want the CDATA from
* @throws TransformerException if transformer error
* @return the CDATA as a <code>String</code>
*/
private static String getElementData(Node parentElement, String childName)
throws TransformerException {
throws TransformerException
{
// Grab the child node
Node childNode = XPathAPI.selectSingleNode(parentElement, childName);
if (childNode == null) {
if (childNode == null)
{
// No child node, so no values
return null;
}
@@ -243,7 +265,8 @@ public class RegistryLoader {
// Get the #text
Node dataNode = childNode.getFirstChild();
if (dataNode == null) {
if (dataNode == null)
{
return null;
}
@@ -259,8 +282,8 @@ public class RegistryLoader {
* <P>
* <code>
* &lt;foo&gt;
* &lt;bar&gt;val1&lt;/bar&gt;
* &lt;bar&gt;val2&lt;/bar&gt;
* &lt;bar&gt;val1&lt;/bar&gt;
* &lt;bar&gt;val2&lt;/bar&gt;
* &lt;/foo&gt;
* </code>
* passing this the <code>foo</code> node and <code>bar</code> will
@@ -268,19 +291,23 @@ public class RegistryLoader {
* </P>
* Why this also isn't a core part of the XML API I do not know...
*
* @param parentElement the element, whose child element you want the CDATA from
* @param childName the name of the element you want the CDATA from
* @return the CDATA as a <code>String</code>
* @param parentElement
* the element, whose child element you want the CDATA from
* @param childName
* the name of the element you want the CDATA from
* @throws TransformerException if transformer error
* @return the CDATA as a <code>String</code>
*/
private static String[] getRepeatedElementData(Node parentElement,
String childName) throws TransformerException {
String childName) throws TransformerException
{
// Grab the child node
NodeList childNodes = XPathAPI.selectNodeList(parentElement, childName);
String[] data = new String[childNodes.getLength()];
for (int i = 0; i < childNodes.getLength(); i++) {
for (int i = 0; i < childNodes.getLength(); i++)
{
// Get the #text node
Node dataNode = childNodes.item(i).getFirstChild();

View File

@@ -14,6 +14,7 @@ import java.io.IOException;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
@@ -47,13 +48,13 @@ import org.xml.sax.SAXException;
* The XML file structure needs to be:
* {@code
* <import_structure>
* <community>
* <name>....</name>
* <community>...</community>
* <collection>
* <name>....</name>
* </collection>
* </community>
* <community>
* <name>....</name>
* <community>...</community>
* <collection>
* <name>....</name>
* </collection>
* </community>
* </import_structure>
* }
* it can be arbitrarily deep, and supports all the metadata elements
@@ -61,34 +62,26 @@ import org.xml.sax.SAXException;
* documentation for more details
*
* @author Richard Jones
*
*/
public class StructBuilder {
/**
* the output xml document which will contain updated information about the
public class StructBuilder
{
/** the output xml document which will contain updated information about the
* imported structure
*/
private static org.jdom.Document xmlOutput = new org.jdom.Document(new Element("imported_structure"));
/**
* a hashtable to hold metadata for the collection being worked on
*/
/** a hashtable to hold metadata for the collection being worked on */
private static Map<String, String> collectionMap = new HashMap<String, String>();
/**
* a hashtable to hold metadata for the community being worked on
*/
/** a hashtable to hold metadata for the community being worked on */
private static Map<String, String> communityMap = new HashMap<String, String>();
protected static CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
protected static EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
/**
* Default constructor
*/
private StructBuilder() { }
/**
* Main method to be run from the command line to import a structure into
* DSpace
@@ -99,42 +92,46 @@ public class StructBuilder {
*
* The output file will contain exactly the same as the source xml document, but
* with the handle for each imported item added as an attribute.
*
* @param argv the command line arguments given
* @param argv commandline arguments
* @throws Exception if an error occurs
*/
public static void main(String[] argv)
throws Exception {
throws Exception
{
CommandLineParser parser = new PosixParser();
Options options = new Options();
Options options = new Options();
options.addOption("f", "file", true, "file");
options.addOption("e", "eperson", true, "eperson");
options.addOption("o", "output", true, "output");
options.addOption( "f", "file", true, "file");
options.addOption( "e", "eperson", true, "eperson");
options.addOption("o", "output", true, "output");
CommandLine line = parser.parse(options, argv);
CommandLine line = parser.parse( options, argv );
String file = null;
String eperson = null;
String output = null;
String file = null;
String eperson = null;
String output = null;
if (line.hasOption('f')) {
file = line.getOptionValue('f');
}
if (line.hasOption('f'))
{
file = line.getOptionValue('f');
}
if (line.hasOption('e')) {
eperson = line.getOptionValue('e');
}
if (line.hasOption('e'))
{
eperson = line.getOptionValue('e');
}
if (line.hasOption('o')) {
output = line.getOptionValue('o');
}
if (line.hasOption('o'))
{
output = line.getOptionValue('o');
}
if (output == null || eperson == null || file == null) {
usage();
System.exit(0);
}
if (output == null || eperson == null || file == null)
{
usage();
System.exit(0);
}
// create a context
Context context = new Context();
@@ -172,16 +169,20 @@ public class StructBuilder {
// generate the output
Element root = xmlOutput.getRootElement();
for (int i = 0; i < elements.length; i++) {
for (int i = 0; i < elements.length; i++)
{
root.addContent(elements[i]);
}
// finally write the string into the output file
try {
try
{
BufferedWriter out = new BufferedWriter(new FileWriter(output));
out.write(new XMLOutputter().outputString(xmlOutput));
out.close();
} catch (IOException e) {
}
catch (IOException e)
{
System.out.println("Unable to write to output file " + output);
System.exit(0);
}
@@ -192,11 +193,10 @@ public class StructBuilder {
/**
* Output the usage information
*/
private static void usage() {
private static void usage()
{
System.out.println("Usage: java StructBuilder -f <source XML file> -o <output file> -e <eperson email>");
System.out.println(
"Communities will be created from the top level, and a map of communities to handles will be returned in " +
"the output file");
System.out.println("Communities will be created from the top level, and a map of communities to handles will be returned in the output file");
return;
}
@@ -204,11 +204,13 @@ public class StructBuilder {
* Validate the XML document. This method does not return, but if validation
* fails it generates an error and ceases execution
*
* @param document the XML document object
* @param document the XML document object
* @throws TransformerException if transformer error
*
*/
private static void validate(org.w3c.dom.Document document)
throws TransformerException {
throws TransformerException
{
StringBuffer err = new StringBuffer();
boolean trip = false;
@@ -216,19 +218,22 @@ public class StructBuilder {
err.append("No changes have been made to the DSpace instance\n\n");
NodeList first = XPathAPI.selectNodeList(document, "/import_structure/community");
if (first.getLength() == 0) {
if (first.getLength() == 0)
{
err.append("-There are no top level communities in the source document");
System.out.println(err.toString());
System.exit(0);
}
String errs = validateCommunities(first, 1);
if (errs != null) {
if (errs != null)
{
err.append(errs);
trip = true;
}
if (trip) {
if (trip)
{
System.out.println(err.toString());
System.exit(0);
}
@@ -239,45 +244,52 @@ public class StructBuilder {
* containing any errors encountered, or null if there were no errors
*
* @param communities the NodeList of communities to validate
* @param level the level in the XML document that we are at, for the purposes
* of error reporting
* @param level the level in the XML document that we are at, for the purposes
* of error reporting
*
* @return the errors that need to be generated by the calling method, or null if
* no errors.
* no errors.
*/
private static String validateCommunities(NodeList communities, int level)
throws TransformerException {
throws TransformerException
{
StringBuffer err = new StringBuffer();
boolean trip = false;
String errs = null;
for (int i = 0; i < communities.getLength(); i++) {
for (int i = 0; i < communities.getLength(); i++)
{
Node n = communities.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " community in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1)
{
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " community in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
// validate sub communities
NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
String comErrs = validateCommunities(subCommunities, level + 1);
if (comErrs != null) {
err.append(comErrs);
trip = true;
}
// validate sub communities
NodeList subCommunities = XPathAPI.selectNodeList(n, "community");
String comErrs = validateCommunities(subCommunities, level + 1);
if (comErrs != null)
{
err.append(comErrs);
trip = true;
}
// validate collections
NodeList collections = XPathAPI.selectNodeList(n, "collection");
String colErrs = validateCollections(collections, level + 1);
if (colErrs != null) {
err.append(colErrs);
trip = true;
}
// validate collections
NodeList collections = XPathAPI.selectNodeList(n, "collection");
String colErrs = validateCollections(collections, level + 1);
if (colErrs != null)
{
err.append(colErrs);
trip = true;
}
}
if (trip) {
if (trip)
{
errs = err.toString();
}
@@ -289,27 +301,32 @@ public class StructBuilder {
* string containing any errors encountered, or returns null if no errors
*
* @param collections a NodeList of collections to validate
* @param level the level in the XML document for the purposes of error reporting
* @param level the level in the XML document for the purposes of error reporting
*
* @return the errors to be generated by the calling method, or null if none
*/
private static String validateCollections(NodeList collections, int level)
throws TransformerException {
throws TransformerException
{
StringBuffer err = new StringBuffer();
boolean trip = false;
String errs = null;
for (int i = 0; i < collections.getLength(); i++) {
for (int i = 0; i < collections.getLength(); i++)
{
Node n = collections.item(i);
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1) {
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " collection in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
NodeList name = XPathAPI.selectNodeList(n, "name");
if (name.getLength() != 1)
{
String pos = Integer.toString(i + 1);
err.append("-The level " + level + " collection in position " + pos);
err.append(" does not contain exactly one name field\n");
trip = true;
}
}
if (trip) {
if (trip)
{
errs = err.toString();
}
@@ -319,13 +336,16 @@ public class StructBuilder {
/**
* Load in the XML from file.
*
* @param filename the filename to load from
* @param filename
* the filename to load from
*
* @return the DOM representation of the XML file
*/
private static org.w3c.dom.Document loadXML(String filename)
throws IOException, ParserConfigurationException, SAXException {
throws IOException, ParserConfigurationException, SAXException
{
DocumentBuilder builder = DocumentBuilderFactory.newInstance()
.newDocumentBuilder();
.newDocumentBuilder();
org.w3c.dom.Document document = builder.parse(new File(filename));
@@ -336,15 +356,19 @@ public class StructBuilder {
* Return the String value of a Node
*
* @param node the node from which we want to extract the string value
*
* @return the string value of the node
*/
public static String getStringValue(Node node) {
public static String getStringValue(Node node)
{
String value = node.getNodeValue();
if (node.hasChildNodes()) {
if (node.hasChildNodes())
{
Node first = node.getFirstChild();
if (first.getNodeType() == Node.TEXT_NODE) {
if (first.getNodeType() == Node.TEXT_NODE)
{
return first.getNodeValue().trim();
}
}
@@ -356,24 +380,30 @@ public class StructBuilder {
* Take a node list of communities and build the structure from them, delegating
* to the relevant methods in this class for sub-communities and collections
*
* @param context the context of the request
* @param context the context of the request
* @param communities a nodelist of communities to create along with their sub-structures
* @param parent the parent community of the nodelist of communities to create
* @param parent the parent community of the nodelist of communities to create
*
* @return an element array containing additional information regarding the
* created communities (e.g. the handles they have been assigned)
* created communities (e.g. the handles they have been assigned)
*/
private static Element[] handleCommunities(Context context, NodeList communities, Community parent)
throws TransformerException, SQLException, Exception {
throws TransformerException, SQLException, Exception
{
Element[] elements = new Element[communities.getLength()];
for (int i = 0; i < communities.getLength(); i++) {
for (int i = 0; i < communities.getLength(); i++)
{
Community community;
Element element = new Element("community");
// create the community or sub community
if (parent != null) {
if (parent != null)
{
community = communityService.create(parent, context);
} else {
}
else
{
community = communityService.create(null, context);
}
@@ -382,9 +412,11 @@ public class StructBuilder {
// now update the metadata
Node tn = communities.item(i);
for (Map.Entry<String, String> entry : communityMap.entrySet()) {
for (Map.Entry<String, String> entry : communityMap.entrySet())
{
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1) {
if (nl.getLength() == 1)
{
communityService.setMetadata(context, community, entry.getValue(), getStringValue(nl.item(0)));
}
}
@@ -414,25 +446,29 @@ public class StructBuilder {
nameElement.setText(communityService.getMetadata(community, "name"));
element.addContent(nameElement);
if (communityService.getMetadata(community, "short_description") != null) {
if (communityService.getMetadata(community, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(communityService.getMetadata(community, "short_description"));
element.addContent(descriptionElement);
}
if (communityService.getMetadata(community, "introductory_text") != null) {
if (communityService.getMetadata(community, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(communityService.getMetadata(community, "introductory_text"));
element.addContent(introElement);
}
if (communityService.getMetadata(community, "copyright_text") != null) {
if (communityService.getMetadata(community, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(communityService.getMetadata(community, "copyright_text"));
element.addContent(copyrightElement);
}
if (communityService.getMetadata(community, "side_bar_text") != null) {
if (communityService.getMetadata(community, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(communityService.getMetadata(community, "side_bar_text"));
element.addContent(sidebarElement);
@@ -447,10 +483,12 @@ public class StructBuilder {
Element[] collectionElements = handleCollections(context, collections, community);
int j;
for (j = 0; j < subCommunityElements.length; j++) {
for (j = 0; j < subCommunityElements.length; j++)
{
element.addContent(subCommunityElements[j]);
}
for (j = 0; j < collectionElements.length; j++) {
for (j = 0; j < collectionElements.length; j++)
{
element.addContent(collectionElements[j]);
}
@@ -461,19 +499,22 @@ public class StructBuilder {
}
/**
* Take a node list of collections and create the structure from them
* Take a node list of collections and create the structure from them
*
* @param context the context of the request
* @param context the context of the request
* @param collections the node list of collections to be created
* @param parent the parent community to whom the collections belong
* @param parent the parent community to whom the collections belong
*
* @return an Element array containing additional information about the
* created collections (e.g. the handle)
* created collections (e.g. the handle)
*/
private static Element[] handleCollections(Context context, NodeList collections, Community parent)
throws TransformerException, SQLException, AuthorizeException, IOException, Exception {
throws TransformerException, SQLException, AuthorizeException, IOException, Exception
{
Element[] elements = new Element[collections.getLength()];
for (int i = 0; i < collections.getLength(); i++) {
for (int i = 0; i < collections.getLength(); i++)
{
Element element = new Element("collection");
Collection collection = collectionService.create(context, parent);
@@ -482,9 +523,11 @@ public class StructBuilder {
// import the rest of the metadata
Node tn = collections.item(i);
for (Map.Entry<String, String> entry : collectionMap.entrySet()) {
for (Map.Entry<String, String> entry : collectionMap.entrySet())
{
NodeList nl = XPathAPI.selectNodeList(tn, entry.getKey());
if (nl.getLength() == 1) {
if (nl.getLength() == 1)
{
collectionService.setMetadata(context, collection, entry.getValue(), getStringValue(nl.item(0)));
}
}
@@ -497,37 +540,43 @@ public class StructBuilder {
nameElement.setText(collectionService.getMetadata(collection, "name"));
element.addContent(nameElement);
if (collectionService.getMetadata(collection, "short_description") != null) {
if (collectionService.getMetadata(collection, "short_description") != null)
{
Element descriptionElement = new Element("description");
descriptionElement.setText(collectionService.getMetadata(collection, "short_description"));
element.addContent(descriptionElement);
}
if (collectionService.getMetadata(collection, "introductory_text") != null) {
if (collectionService.getMetadata(collection, "introductory_text") != null)
{
Element introElement = new Element("intro");
introElement.setText(collectionService.getMetadata(collection, "introductory_text"));
element.addContent(introElement);
}
if (collectionService.getMetadata(collection, "copyright_text") != null) {
if (collectionService.getMetadata(collection, "copyright_text") != null)
{
Element copyrightElement = new Element("copyright");
copyrightElement.setText(collectionService.getMetadata(collection, "copyright_text"));
element.addContent(copyrightElement);
}
if (collectionService.getMetadata(collection, "side_bar_text") != null) {
if (collectionService.getMetadata(collection, "side_bar_text") != null)
{
Element sidebarElement = new Element("sidebar");
sidebarElement.setText(collectionService.getMetadata(collection, "side_bar_text"));
element.addContent(sidebarElement);
}
if (collectionService.getMetadata(collection, "license") != null) {
if (collectionService.getMetadata(collection, "license") != null)
{
Element sidebarElement = new Element("license");
sidebarElement.setText(collectionService.getMetadata(collection, "license"));
element.addContent(sidebarElement);
}
if (collectionService.getMetadata(collection, "provenance_description") != null) {
if (collectionService.getMetadata(collection, "provenance_description") != null)
{
Element sidebarElement = new Element("provenance");
sidebarElement.setText(collectionService.getMetadata(collection, "provenance_description"));
element.addContent(sidebarElement);

View File

@@ -7,93 +7,67 @@
*/
package org.dspace.app.bulkedit;
import org.dspace.content.Item;
import org.dspace.content.Collection;
import java.util.ArrayList;
import java.util.List;
import org.dspace.content.Collection;
import org.dspace.content.Item;
/**
* Utility class to store changes to item that may occur during a batch edit.
*
* @author Stuart Lewis
*/
public class BulkEditChange {
/**
* The item these changes relate to
*/
public class BulkEditChange
{
/** The item these changes relate to */
private Item item;
/**
* The List of hashtables with the new elements
*/
/** The List of hashtables with the new elements */
private List<BulkEditMetadataValue> adds;
/**
* The List of hashtables with the removed elements
*/
/** The List of hashtables with the removed elements */
private List<BulkEditMetadataValue> removes;
/**
* The List of hashtables with the unchanged elements
*/
/** The List of hashtables with the unchanged elements */
private List<BulkEditMetadataValue> constant;
/**
* The List of the complete set of new values (constant + adds)
*/
/** The List of the complete set of new values (constant + adds) */
private List<BulkEditMetadataValue> complete;
/**
* The list of old collections the item used to be mapped to
*/
/** The list of old collections the item used to be mapped to */
private List<Collection> oldMappedCollections;
/**
* The list of new collections the item has been mapped into
*/
/** The list of new collections the item has been mapped into */
private List<Collection> newMappedCollections;
/**
* The old owning collection
*/
/** The old owning collection */
private Collection oldOwningCollection;
/**
* The new owning collection
*/
/** The new owning collection */
private Collection newOwningCollection;
/**
* Is this a new item
*/
/** Is this a new item */
private boolean newItem;
/**
* Has this item been deleted?
*/
/** Has this item been deleted? */
private boolean deleted;
/**
* Has this item been withdrawn?
*/
/** Has this item been withdrawn? */
private boolean withdrawn;
/**
* Has this item been reinstated?
*/
/** Has this item been reinstated? */
private boolean reinstated;
/**
* Have any changes actually been made?
*/
/** Have any changes actually been made? */
private boolean empty;
/**
* Initialise a change holder for a new item
*/
public BulkEditChange() {
public BulkEditChange()
{
// Set the item to be null
item = null;
newItem = true;
@@ -115,7 +89,8 @@ public class BulkEditChange {
*
* @param i The Item to store
*/
public BulkEditChange(Item i) {
public BulkEditChange(Item i)
{
// Store the item
item = i;
newItem = false;
@@ -135,7 +110,8 @@ public class BulkEditChange {
*
* @param i The item
*/
public void setItem(Item i) {
public void setItem(Item i)
{
// Store the item
item = i;
}
@@ -145,7 +121,8 @@ public class BulkEditChange {
*
* @param dcv The value to add
*/
public void registerAdd(BulkEditMetadataValue dcv) {
public void registerAdd(BulkEditMetadataValue dcv)
{
// Add the added value
adds.add(dcv);
complete.add(dcv);
@@ -157,7 +134,8 @@ public class BulkEditChange {
*
* @param dcv The value to remove
*/
public void registerRemove(BulkEditMetadataValue dcv) {
public void registerRemove(BulkEditMetadataValue dcv)
{
// Add the removed value
removes.add(dcv);
empty = false;
@@ -168,7 +146,8 @@ public class BulkEditChange {
*
* @param dcv The value to keep unchanged
*/
public void registerConstant(BulkEditMetadataValue dcv) {
public void registerConstant(BulkEditMetadataValue dcv)
{
// Add the removed value
constant.add(dcv);
complete.add(dcv);
@@ -179,7 +158,8 @@ public class BulkEditChange {
*
* @param c The new mapped Collection
*/
public void registerNewMappedCollection(Collection c) {
public void registerNewMappedCollection(Collection c)
{
// Add the new owning Collection
newMappedCollections.add(c);
empty = false;
@@ -190,22 +170,27 @@ public class BulkEditChange {
*
* @param c The old mapped Collection
*/
public void registerOldMappedCollection(Collection c) {
public void registerOldMappedCollection(Collection c)
{
// Add the old owning Collection (if it isn't there already, or is an old collection)
boolean found = false;
if ((this.getOldOwningCollection() != null) &&
(this.getOldOwningCollection().getHandle().equals(c.getHandle()))) {
(this.getOldOwningCollection().getHandle().equals(c.getHandle())))
{
found = true;
}
for (Collection collection : oldMappedCollections) {
if (collection.getHandle().equals(c.getHandle())) {
for (Collection collection : oldMappedCollections)
{
if (collection.getHandle().equals(c.getHandle()))
{
found = true;
}
}
if (!found) {
if (!found)
{
oldMappedCollections.add(c);
empty = false;
}
@@ -217,7 +202,8 @@ public class BulkEditChange {
* @param oldC The old owning collection
* @param newC The new owning collection
*/
public void changeOwningCollection(Collection oldC, Collection newC) {
public void changeOwningCollection(Collection oldC, Collection newC)
{
// Store the old owning collection
oldOwningCollection = oldC;
@@ -231,7 +217,8 @@ public class BulkEditChange {
*
* @param newC The new owning collection
*/
public void setOwningCollection(Collection newC) {
public void setOwningCollection(Collection newC)
{
// Store the new owning collection
newOwningCollection = newC;
//empty = false;
@@ -242,7 +229,8 @@ public class BulkEditChange {
*
* @return The item
*/
public Item getItem() {
public Item getItem()
{
// Return the item
return item;
}
@@ -252,7 +240,8 @@ public class BulkEditChange {
*
* @return the list of elements and their values that have been added.
*/
public List<BulkEditMetadataValue> getAdds() {
public List<BulkEditMetadataValue> getAdds()
{
// Return the array
return adds;
}
@@ -262,7 +251,8 @@ public class BulkEditChange {
*
* @return the list of elements and their values that have been removed.
*/
public List<BulkEditMetadataValue> getRemoves() {
public List<BulkEditMetadataValue> getRemoves()
{
// Return the array
return removes;
}
@@ -272,7 +262,8 @@ public class BulkEditChange {
*
* @return the list of unchanged values
*/
public List<BulkEditMetadataValue> getConstant() {
public List<BulkEditMetadataValue> getConstant()
{
// Return the array
return constant;
}
@@ -282,7 +273,8 @@ public class BulkEditChange {
*
* @return the list of all values
*/
public List<BulkEditMetadataValue> getComplete() {
public List<BulkEditMetadataValue> getComplete()
{
// Return the array
return complete;
}
@@ -292,7 +284,8 @@ public class BulkEditChange {
*
* @return the list of new mapped collections
*/
public List<Collection> getNewMappedCollections() {
public List<Collection> getNewMappedCollections()
{
// Return the array
return newMappedCollections;
}
@@ -302,7 +295,8 @@ public class BulkEditChange {
*
* @return the list of old mapped collections
*/
public List<Collection> getOldMappedCollections() {
public List<Collection> getOldMappedCollections()
{
// Return the array
return oldMappedCollections;
}
@@ -312,7 +306,8 @@ public class BulkEditChange {
*
* @return the old owning collection
*/
public Collection getOldOwningCollection() {
public Collection getOldOwningCollection()
{
// Return the old owning collection
return oldOwningCollection;
}
@@ -322,7 +317,8 @@ public class BulkEditChange {
*
* @return the new owning collection
*/
public Collection getNewOwningCollection() {
public Collection getNewOwningCollection()
{
// Return the new owning collection
return newOwningCollection;
}
@@ -332,7 +328,8 @@ public class BulkEditChange {
*
* @return Whether or not this is for a new item
*/
public boolean isNewItem() {
public boolean isNewItem()
{
// Return the new item status
return newItem;
}
@@ -342,7 +339,8 @@ public class BulkEditChange {
*
* @return Whether or not this is for a deleted item
*/
public boolean isDeleted() {
public boolean isDeleted()
{
// Return the new item status
return deleted;
}
@@ -361,7 +359,8 @@ public class BulkEditChange {
*
* @return Whether or not this is for a withdrawn item
*/
public boolean isWithdrawn() {
public boolean isWithdrawn()
{
// Return the new item status
return withdrawn;
}
@@ -380,7 +379,8 @@ public class BulkEditChange {
*
* @return Whether or not this is for a reinstated item
*/
public boolean isReinstated() {
public boolean isReinstated()
{
// Return the new item status
return reinstated;
}
@@ -399,7 +399,8 @@ public class BulkEditChange {
*
* @return Whether or not changes have been made
*/
public boolean hasChanges() {
public boolean hasChanges()
{
return !empty;
}
}

View File

@@ -7,26 +7,7 @@
*/
package org.dspace.app.bulkedit;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang3.StringUtils;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
@@ -35,14 +16,19 @@ import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.authority.Choices;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.content.authority.Choices;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.util.*;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import java.io.*;
/**
* Utility class to read and write CSV files
*
@@ -52,74 +38,50 @@ import org.dspace.services.factory.DSpaceServicesFactory;
*
* This class has been made serializable, as it is stored in a Session.
* Is it wise to:
* a) be putting this into a user's session?
* b) holding an entire CSV upload in memory?
* a) be putting this into a user's session?
* b) holding an entire CSV upload in memory?
*
* @author Stuart Lewis
*/
public class DSpaceCSV implements Serializable {
/**
* The headings of the CSV file
*/
public class DSpaceCSV implements Serializable
{
/** The headings of the CSV file */
protected List<String> headings;
/**
* An array list of CSV lines
*/
/** An array list of CSV lines */
protected List<DSpaceCSVLine> lines;
/**
* A counter of how many CSV lines this object holds
*/
/** A counter of how many CSV lines this object holds */
protected int counter;
/**
* The value separator (defaults to double pipe '||')
*/
/** The value separator (defaults to double pipe '||') */
protected String valueSeparator;
/**
* The value separator in an escaped form for using in regexes
*/
/** The value separator in an escaped form for using in regexes */
protected String escapedValueSeparator;
/**
* The field separator (defaults to comma)
*/
/** The field separator (defaults to comma) */
protected String fieldSeparator;
/**
* The field separator in an escaped form for using in regexes
*/
/** The field separator in an escaped form for using in regexes */
protected String escapedFieldSeparator;
/**
* The authority separator (defaults to double colon '::')
*/
/** The authority separator (defaults to double colon '::') */
protected String authoritySeparator;
/**
* The authority separator in an escaped form for using in regexes
*/
/** The authority separator in an escaped form for using in regexes */
protected String escapedAuthoritySeparator;
protected transient final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected transient final MetadataSchemaService metadataSchemaService =
ContentServiceFactory.getInstance().getMetadataSchemaService();
protected transient final MetadataFieldService metadataFieldService =
ContentServiceFactory.getInstance().getMetadataFieldService();
protected transient final AuthorityValueService authorityValueService =
AuthorityServiceFactory.getInstance().getAuthorityValueService();
protected transient final MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected transient final MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
protected transient final AuthorityValueService authorityValueService = AuthorityServiceFactory.getInstance().getAuthorityValueService();
/**
* Whether to export all metadata such as handles and provenance information
*/
/** Whether to export all metadata such as handles and provenance information */
protected boolean exportAll;
/**
* A list of metadata elements to ignore
*/
/** A list of metadata elements to ignore */
protected Map<String, String> ignore;
@@ -128,7 +90,8 @@ public class DSpaceCSV implements Serializable {
*
* @param exportAll Whether to export all metadata such as handles and provenance information
*/
public DSpaceCSV(boolean exportAll) {
public DSpaceCSV(boolean exportAll)
{
// Initialise the class
init();
@@ -141,37 +104,48 @@ public class DSpaceCSV implements Serializable {
*
* @param f The file to read from
* @param c The DSpace Context
*
* @throws Exception thrown if there is an error reading or processing the file
*/
public DSpaceCSV(File f, Context c) throws Exception {
public DSpaceCSV(File f, Context c) throws Exception
{
// Initialise the class
init();
// Open the CSV file
BufferedReader input = null;
try {
input = new BufferedReader(new InputStreamReader(new FileInputStream(f), "UTF-8"));
try
{
input = new BufferedReader(new InputStreamReader(new FileInputStream(f),"UTF-8"));
// Read the heading line
String head = input.readLine();
String[] headingElements = head.split(escapedFieldSeparator);
int columnCounter = 0;
for (String element : headingElements) {
for (String element : headingElements)
{
columnCounter++;
// Remove surrounding quotes if there are any
if ((element.startsWith("\"")) && (element.endsWith("\""))) {
if ((element.startsWith("\"")) && (element.endsWith("\"")))
{
element = element.substring(1, element.length() - 1);
}
// Store the heading
if ("collection".equals(element)) {
if ("collection".equals(element))
{
// Store the heading
headings.add(element);
} else if ("action".equals(element)) { // Store the action
}
// Store the action
else if ("action".equals(element))
{
// Store the heading
headings.add(element);
} else if (!"id".equals(element)) {
}
else if (!"id".equals(element))
{
String authorityPrefix = "";
AuthorityValue authorityValueType = authorityValueService.getAuthorityValueType(element);
if (authorityValueType != null) {
@@ -206,8 +180,7 @@ public class DSpaceCSV implements Serializable {
}
// Check that the metadata element exists in the schema
MetadataField foundField = metadataFieldService
.findByElement(c, foundSchema, metadataElement, metadataQualifier);
MetadataField foundField = metadataFieldService.findByElement(c, foundSchema, metadataElement, metadataQualifier);
if (foundField == null) {
throw new MetadataImportInvalidHeadingException(clean[0],
MetadataImportInvalidHeadingException.ELEMENT,
@@ -223,7 +196,8 @@ public class DSpaceCSV implements Serializable {
StringBuilder lineBuilder = new StringBuilder();
String lineRead;
while ((lineRead = input.readLine()) != null) {
while ((lineRead = input.readLine()) != null)
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line
lineBuilder.append("\n").append(lineRead);
@@ -262,8 +236,11 @@ public class DSpaceCSV implements Serializable {
addItem(lineRead);
}
}
} finally {
if (input != null) {
}
finally
{
if (input != null)
{
input.close();
}
}
@@ -272,7 +249,8 @@ public class DSpaceCSV implements Serializable {
/**
* Initialise this class with values from dspace.cfg
*/
protected void init() {
protected void init()
{
// Set the value separator
setValueSeparator();
@@ -295,16 +273,13 @@ public class DSpaceCSV implements Serializable {
ignore = new HashMap<>();
// Specify default values
String[] defaultValues =
new String[] {
"dc.date.accessioned, dc.date.available, dc.date.updated, dc.description.provenance"
};
String[] toIgnoreArray =
DSpaceServicesFactory.getInstance()
.getConfigurationService()
.getArrayProperty("bulkedit.ignore-on-export", defaultValues);
for (String toIgnoreString : toIgnoreArray) {
if (!"".equals(toIgnoreString.trim())) {
String[] defaultValues = new String[]{"dc.date.accessioned, dc.date.available, " +
"dc.date.updated, dc.description.provenance"};
String[] toIgnoreArray = DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty("bulkedit.ignore-on-export", defaultValues);
for (String toIgnoreString : toIgnoreArray)
{
if (!"".equals(toIgnoreString.trim()))
{
ignore.put(toIgnoreString.trim(), toIgnoreString.trim());
}
}
@@ -332,13 +307,16 @@ public class DSpaceCSV implements Serializable {
*
* If not set, defaults to double pipe '||'
*/
private void setValueSeparator() {
private void setValueSeparator()
{
// Get the value separator
valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.valueseparator");
if ((valueSeparator != null) && (!"".equals(valueSeparator.trim()))) {
valueSeparator = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("bulkedit.valueseparator");
if ((valueSeparator != null) && (!"".equals(valueSeparator.trim())))
{
valueSeparator = valueSeparator.trim();
} else {
}
else
{
valueSeparator = "||";
}
@@ -358,22 +336,32 @@ public class DSpaceCSV implements Serializable {
* Special values are 'tab', 'hash' and 'semicolon' which will
* get substituted from the text to the value.
*/
private void setFieldSeparator() {
private void setFieldSeparator()
{
// Get the value separator
fieldSeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.fieldseparator");
if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim()))) {
fieldSeparator =DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("bulkedit.fieldseparator");
if ((fieldSeparator != null) && (!"".equals(fieldSeparator.trim())))
{
fieldSeparator = fieldSeparator.trim();
if ("tab".equals(fieldSeparator)) {
if ("tab".equals(fieldSeparator))
{
fieldSeparator = "\t";
} else if ("semicolon".equals(fieldSeparator)) {
}
else if ("semicolon".equals(fieldSeparator))
{
fieldSeparator = ";";
} else if ("hash".equals(fieldSeparator)) {
}
else if ("hash".equals(fieldSeparator))
{
fieldSeparator = "#";
} else {
}
else
{
fieldSeparator = fieldSeparator.trim();
}
} else {
}
else
{
fieldSeparator = ",";
}
@@ -383,20 +371,23 @@ public class DSpaceCSV implements Serializable {
escapedFieldSeparator = match.replaceAll("\\\\$1");
}
/**
/**
* Set the authority separator for value with authority data.
*
* Is set in dspace.cfg as bulkedit.authorityseparator
*
* If not set, defaults to double colon '::'
*/
private void setAuthoritySeparator() {
private void setAuthoritySeparator()
{
// Get the value separator
authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService()
.getProperty("bulkedit.authorityseparator");
if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim()))) {
authoritySeparator = DSpaceServicesFactory.getInstance().getConfigurationService().getProperty("bulkedit.authorityseparator");
if ((authoritySeparator != null) && (!"".equals(authoritySeparator.trim())))
{
authoritySeparator = authoritySeparator.trim();
} else {
}
else
{
authoritySeparator = "::";
}
@@ -410,9 +401,11 @@ public class DSpaceCSV implements Serializable {
* Add a DSpace item to the CSV file
*
* @param i The DSpace item
*
* @throws Exception if something goes wrong with adding the Item
*/
public final void addItem(Item i) throws Exception {
public final void addItem(Item i) throws Exception
{
// If the item does not have an "owningCollection" the the below "getHandle()" call will fail
// This should not happen but is here for safety.
if (i.getOwningCollection() == null) {
@@ -428,42 +421,49 @@ public class DSpaceCSV implements Serializable {
// Add in any mapped collections
List<Collection> collections = i.getCollections();
for (Collection c : collections) {
for (Collection c : collections)
{
// Only add if it is not the owning collection
if (!c.getHandle().equals(owningCollectionHandle)) {
if (!c.getHandle().equals(owningCollectionHandle))
{
line.add("collection", c.getHandle());
}
}
// Populate it
List<MetadataValue> md = itemService.getMetadata(i, Item.ANY, Item.ANY, Item.ANY, Item.ANY);
for (MetadataValue value : md) {
for (MetadataValue value : md)
{
MetadataField metadataField = value.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
// Get the key (schema.element)
String key = metadataSchema.getName() + "." + metadataField.getElement();
// Add the qualifier if there is one (schema.element.qualifier)
if (metadataField.getQualifier() != null) {
if (metadataField.getQualifier() != null)
{
key = key + "." + metadataField.getQualifier();
}
// Add the language if there is one (schema.element.qualifier[langauge])
//if ((value.language != null) && (!"".equals(value.language)))
if (value.getLanguage() != null) {
if (value.getLanguage() != null)
{
key = key + "[" + value.getLanguage() + "]";
}
// Store the item
if (exportAll || okToExport(metadataField)) {
if (exportAll || okToExport(metadataField))
{
// Add authority and confidence if authority is not null
String mdValue = value.getValue();
if (value.getAuthority() != null && !"".equals(value.getAuthority())) {
mdValue += authoritySeparator + value.getAuthority() + authoritySeparator + (value
.getConfidence() != -1 ? value.getConfidence() : Choices.CF_ACCEPTED);
if (value.getAuthority() != null && !"".equals(value.getAuthority()))
{
mdValue += authoritySeparator + value.getAuthority() + authoritySeparator + (value.getConfidence() != -1 ? value.getConfidence() : Choices.CF_ACCEPTED);
}
line.add(key, mdValue);
if (!headings.contains(key)) {
if (!headings.contains(key))
{
headings.add(key);
}
}
@@ -478,10 +478,12 @@ public class DSpaceCSV implements Serializable {
* @param line The line of elements
* @throws Exception Thrown if an error occurs when adding the item
*/
public final void addItem(String line) throws Exception {
public final void addItem(String line) throws Exception
{
// Check to see if the last character is a field separator, which hides the last empty column
boolean last = false;
if (line.endsWith(fieldSeparator)) {
if (line.endsWith(fieldSeparator))
{
// Add a space to the end, then remove it later
last = true;
line += " ";
@@ -494,12 +496,15 @@ public class DSpaceCSV implements Serializable {
// Merge parts with embedded separators
boolean alldone = false;
while (!alldone) {
while (!alldone)
{
boolean found = false;
int i = 0;
for (String part : bits) {
for (String part : bits)
{
int bitcounter = part.length() - part.replaceAll("\"", "").length();
if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1))) {
if ((part.startsWith("\"")) && ((!part.endsWith("\"")) || ((bitcounter & 1) == 1)))
{
found = true;
String add = bits.get(i) + fieldSeparator + bits.get(i + 1);
bits.remove(i);
@@ -514,8 +519,10 @@ public class DSpaceCSV implements Serializable {
// Deal with quotes around the elements
int i = 0;
for (String part : bits) {
if ((part.startsWith("\"")) && (part.endsWith("\""))) {
for (String part : bits)
{
if ((part.startsWith("\"")) && (part.endsWith("\"")))
{
part = part.substring(1, part.length() - 1);
bits.set(i, part);
}
@@ -524,8 +531,10 @@ public class DSpaceCSV implements Serializable {
// Remove embedded quotes
i = 0;
for (String part : bits) {
if (part.contains("\"\"")) {
for (String part : bits)
{
if (part.contains("\"\""))
{
part = part.replaceAll("\"\"", "\"");
bits.set(i, part);
}
@@ -537,25 +546,34 @@ public class DSpaceCSV implements Serializable {
DSpaceCSVLine csvLine;
// Is this an existing item, or a new item (where id = '+')
if ("+".equals(id)) {
if ("+".equals(id))
{
csvLine = new DSpaceCSVLine();
} else {
try {
}
else
{
try
{
csvLine = new DSpaceCSVLine(UUID.fromString(id));
} catch (NumberFormatException nfe) {
}
catch (NumberFormatException nfe)
{
System.err.println("Invalid item identifier: " + id);
System.err.println("Please check your CSV file for information. " +
"Item id must be numeric, or a '+' to add a new item");
throw (nfe);
"Item id must be numeric, or a '+' to add a new item");
throw(nfe);
}
}
// Add the rest of the parts
i = 0;
for (String part : bits) {
if (i > 0) {
for (String part : bits)
{
if (i > 0)
{
// Is this a last empty item?
if ((last) && (i == headings.size())) {
if ((last) && (i == headings.size()))
{
part = "";
}
@@ -567,8 +585,10 @@ public class DSpaceCSV implements Serializable {
}
csvLine.add(headings.get(i - 1), null);
String[] elements = part.split(escapedValueSeparator);
for (String element : elements) {
if ((element != null) && (!"".equals(element))) {
for (String element : elements)
{
if ((element != null) && (!"".equals(element)))
{
csvLine.add(headings.get(i - 1), element);
}
}
@@ -584,7 +604,8 @@ public class DSpaceCSV implements Serializable {
*
* @return The lines
*/
public final List<DSpaceCSVLine> getCSVLines() {
public final List<DSpaceCSVLine> getCSVLines()
{
// Return the lines
return lines;
}
@@ -594,19 +615,22 @@ public class DSpaceCSV implements Serializable {
*
* @return the array of CSV formatted Strings
*/
public final String[] getCSVLinesAsStringArray() {
public final String[] getCSVLinesAsStringArray()
{
// Create the headings line
String[] csvLines = new String[counter + 1];
csvLines[0] = "id" + fieldSeparator + "collection";
List<String> headingsCopy = new ArrayList<>(headings);
Collections.sort(headingsCopy);
for (String value : headingsCopy) {
for (String value : headingsCopy)
{
csvLines[0] = csvLines[0] + fieldSeparator + value;
}
Iterator<DSpaceCSVLine> i = lines.iterator();
int c = 1;
while (i.hasNext()) {
while (i.hasNext())
{
csvLines[c++] = i.next().toCSV(headingsCopy, fieldSeparator, valueSeparator);
}
@@ -617,13 +641,15 @@ public class DSpaceCSV implements Serializable {
* Save the CSV file to the given filename
*
* @param filename The filename to save the CSV file to
*
* @throws IOException Thrown if an error occurs when writing the file
*/
public final void save(String filename) throws IOException {
public final void save(String filename) throws IOException
{
// Save the file
BufferedWriter out = new BufferedWriter(
new OutputStreamWriter(
new FileOutputStream(filename), "UTF-8"));
new OutputStreamWriter(
new FileOutputStream(filename), "UTF-8"));
for (String csvLine : getCSVLinesAsStringArray()) {
out.write(csvLine + "\n");
}
@@ -640,10 +666,12 @@ public class DSpaceCSV implements Serializable {
* @param md The Metadatum to examine
* @return Whether or not it is OK to export this element
*/
protected boolean okToExport(MetadataField md) {
protected boolean okToExport(MetadataField md)
{
// Now compare with the list to ignore
String key = md.getMetadataSchema().getName() + "." + md.getElement();
if (md.getQualifier() != null) {
if (md.getQualifier() != null)
{
key += "." + md.getQualifier();
}
if (ignore.get(key) != null) {
@@ -659,7 +687,8 @@ public class DSpaceCSV implements Serializable {
*
* @return The headings
*/
public List<String> getHeadings() {
public List<String> getHeadings()
{
return headings;
}
@@ -669,11 +698,13 @@ public class DSpaceCSV implements Serializable {
* @return The formatted String as a csv
*/
@Override
public final String toString() {
public final String toString()
{
// Return the csv as one long string
StringBuilder csvLines = new StringBuilder();
String[] lines = this.getCSVLinesAsStringArray();
for (String line : lines) {
for (String line : lines)
{
csvLines.append(line).append("\n");
}
return csvLines.toString();

View File

@@ -7,41 +7,30 @@
*/
package org.dspace.app.bulkedit;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import org.dspace.authority.AuthorityValue;
import org.dspace.authority.factory.AuthorityServiceFactory;
import org.dspace.authority.service.AuthorityValueService;
import java.io.Serializable;
import java.util.*;
/**
* Utility class to store a line from a CSV file
*
* @author Stuart Lewis
*/
public class DSpaceCSVLine implements Serializable {
/**
* The item id of the item represented by this line. -1 is for a new item
*/
public class DSpaceCSVLine implements Serializable
{
/** The item id of the item represented by this line. -1 is for a new item */
private final UUID id;
/**
* The elements in this line in a hashtable, keyed by the metadata type
*/
/** The elements in this line in a hashtable, keyed by the metadata type */
private final Map<String, ArrayList> items;
protected transient final AuthorityValueService authorityValueService
= AuthorityServiceFactory.getInstance().getAuthorityValueService();
= AuthorityServiceFactory.getInstance().getAuthorityValueService();
/**
* ensuring that the order-sensible columns of the csv are processed in the correct order
*/
/** ensuring that the order-sensible columns of the csv are processed in the correct order */
private transient final Comparator<? super String> headerComparator = new Comparator<String>() {
@Override
public int compare(String md1, String md2) {
@@ -52,7 +41,8 @@ public class DSpaceCSVLine implements Serializable {
int compare;
if (source1 == null && source2 != null) {
compare = -1;
} else if (source1 != null && source2 == null) {
}
else if (source1 != null && source2 == null) {
compare = 1;
} else {
// the order of the rest does not matter
@@ -67,7 +57,8 @@ public class DSpaceCSVLine implements Serializable {
*
* @param itemId The item ID of the line
*/
public DSpaceCSVLine(UUID itemId) {
public DSpaceCSVLine(UUID itemId)
{
// Store the ID + separator, and initialise the hashtable
this.id = itemId;
items = new TreeMap<>(headerComparator);
@@ -77,7 +68,8 @@ public class DSpaceCSVLine implements Serializable {
/**
* Create a new CSV line for a new item
*/
public DSpaceCSVLine() {
public DSpaceCSVLine()
{
// Set the ID to be null, and initialise the hashtable
this.id = null;
this.items = new TreeMap<>(headerComparator);
@@ -88,7 +80,8 @@ public class DSpaceCSVLine implements Serializable {
*
* @return The item ID
*/
public UUID getID() {
public UUID getID()
{
// Return the ID
return id;
}
@@ -96,17 +89,20 @@ public class DSpaceCSVLine implements Serializable {
/**
* Add a new metadata value to this line
*
* @param key The metadata key (e.g. dc.contributor.author)
* @param key The metadata key (e.g. dc.contributor.author)
* @param value The metadata value
*/
public void add(String key, String value) {
public void add(String key, String value)
{
// Create the array list if we need to
if (items.get(key) == null) {
if (items.get(key) == null)
{
items.put(key, new ArrayList<String>());
}
// Store the item if it is not null
if (value != null) {
if (value != null)
{
items.get(key).add(value);
}
}
@@ -117,7 +113,8 @@ public class DSpaceCSVLine implements Serializable {
* @param key The metadata key
* @return All the elements that match
*/
public List<String> get(String key) {
public List<String> get(String key)
{
// Return any relevant values
return items.get(key);
}
@@ -127,11 +124,12 @@ public class DSpaceCSVLine implements Serializable {
*
* @return The action (may be blank, 'withdraw', 'reinstate' or 'delete')
*/
public String getAction() {
public String getAction()
{
if (items.containsKey("action")) {
ArrayList actions = items.get("action");
if (actions.size() > 0) {
return ((String) actions.get(0)).trim();
return ((String)actions.get(0)).trim();
}
}
return "";
@@ -142,7 +140,8 @@ public class DSpaceCSVLine implements Serializable {
*
* @return An enumeration of all the keys
*/
public Set<String> keys() {
public Set<String> keys()
{
// Return the keys
return items.keySet();
}
@@ -150,23 +149,26 @@ public class DSpaceCSVLine implements Serializable {
/**
* Write this line out as a CSV formatted string, in the order given by the headings provided
*
* @param headings The headings which define the order the elements must be presented in
* @param headings The headings which define the order the elements must be presented in
* @param fieldSeparator separator between metadata fields
* @param valueSeparator separator between metadata values (within a field)
* @return The CSV formatted String
*/
protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator) {
protected String toCSV(List<String> headings, String fieldSeparator, String valueSeparator)
{
StringBuilder bits = new StringBuilder();
// Add the id
bits.append("\"").append(id).append("\"").append(fieldSeparator);
bits.append(valueToCSV(items.get("collection"), valueSeparator));
bits.append(valueToCSV(items.get("collection"),valueSeparator));
// Add the rest of the elements
for (String heading : headings) {
for (String heading : headings)
{
bits.append(fieldSeparator);
List<String> values = items.get(heading);
if (values != null && !"collection".equals(heading)) {
if (values != null && !"collection".equals(heading))
{
bits.append(valueToCSV(values, valueSeparator));
}
}
@@ -177,26 +179,33 @@ public class DSpaceCSVLine implements Serializable {
/**
* Internal method to create a CSV formatted String joining a given set of elements
*
* @param values The values to create the string from
* @param values The values to create the string from
* @param valueSeparator value separator
* @return The line as a CSV formatted String
*/
protected String valueToCSV(List<String> values, String valueSeparator) {
protected String valueToCSV(List<String> values, String valueSeparator)
{
// Check there is some content
if (values == null) {
if (values == null)
{
return "";
}
// Get on with the work
String s;
if (values.size() == 1) {
if (values.size() == 1)
{
s = values.get(0);
} else {
}
else
{
// Concatenate any fields together
StringBuilder str = new StringBuilder();
for (String value : values) {
if (str.length() > 0) {
for (String value : values)
{
if (str.length() > 0)
{
str.append(valueSeparator);
}

View File

@@ -7,46 +7,36 @@
*/
package org.dspace.app.bulkedit;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import com.google.common.collect.Iterators;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.apache.commons.cli.*;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.ArrayList;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
* Metadata exporter to allow the batch export of metadata into a file
*
* @author Stuart Lewis
*/
public class MetadataExport {
/**
* The items to export
*/
public class MetadataExport
{
/** The items to export */
protected Iterator<Item> toExport;
protected ItemService itemService;
protected Context context;
/**
* Whether to export all metadata, or just normally edited metadata
*/
/** Whether to export all metadata, or just normally edited metadata */
protected boolean exportAll;
protected MetadataExport() {
@@ -56,11 +46,12 @@ public class MetadataExport {
/**
* Set up a new metadata export
*
* @param c The Context
* @param toExport The ItemIterator of items to export
* @param c The Context
* @param toExport The ItemIterator of items to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll) {
public MetadataExport(Context c, Iterator<Item> toExport, boolean exportAll)
{
itemService = ContentServiceFactory.getInstance().getItemService();
// Store the export settings
@@ -72,19 +63,23 @@ public class MetadataExport {
/**
* Method to export a community (and sub-communities and collections)
*
* @param c The Context
* @param toExport The Community to export
* @param c The Context
* @param toExport The Community to export
* @param exportAll whether to export all metadata or not (include handle, provenance etc)
*/
public MetadataExport(Context c, Community toExport, boolean exportAll) {
public MetadataExport(Context c, Community toExport, boolean exportAll)
{
itemService = ContentServiceFactory.getInstance().getItemService();
try {
try
{
// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
} catch (SQLException sqle) {
}
catch (SQLException sqle)
{
// Something went wrong...
System.err.println("Error running exporter:");
sqle.printStackTrace(System.err);
@@ -95,43 +90,49 @@ public class MetadataExport {
/**
* Build an array list of item ids that are in a community (include sub-communities and collections)
*
* @param context DSpace context
* @param context DSpace context
* @param community The community to build from
* @param indent How many spaces to use when writing out the names of items added
* @param indent How many spaces to use when writing out the names of items added
* @return The list of item ids
* @throws SQLException if database error
*/
protected Iterator<Item> buildFromCommunity(Context context, Community community, int indent)
throws SQLException {
throws SQLException
{
// Add all the collections
List<Collection> collections = community.getCollections();
Iterator<Item> result = null;
for (Collection collection : collections) {
for (int i = 0; i < indent; i++) {
for (Collection collection : collections)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
Iterator<Item> items = itemService.findByCollection(context, collection);
result = addItemsToResult(result, items);
result = addItemsToResult(result,items);
}
// Add all the sub-communities
List<Community> communities = community.getSubcommunities();
for (Community subCommunity : communities) {
for (int i = 0; i < indent; i++) {
for (Community subCommunity : communities)
{
for (int i = 0; i < indent; i++)
{
System.out.print(" ");
}
Iterator<Item> items = buildFromCommunity(context, subCommunity, indent + 1);
result = addItemsToResult(result, items);
result = addItemsToResult(result,items);
}
return result;
}
private Iterator<Item> addItemsToResult(Iterator<Item> result, Iterator<Item> items) {
if (result == null) {
if(result == null)
{
result = items;
} else {
}else{
result = Iterators.concat(result, items);
}
@@ -143,14 +144,17 @@ public class MetadataExport {
*
* @return the exported CSV lines
*/
public DSpaceCSV export() {
try {
public DSpaceCSV export()
{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);
// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext()) {
while (toExport.hasNext())
{
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
@@ -159,7 +163,9 @@ public class MetadataExport {
context.setMode(originalMode);
// Return the results
return csv;
} catch (Exception e) {
}
catch (Exception e)
{
// Something went wrong...
System.err.println("Error exporting to CSV:");
e.printStackTrace();
@@ -170,10 +176,11 @@ public class MetadataExport {
/**
* Print the help message
*
* @param options The command line options the user gave
* @param options The command line options the user gave
* @param exitCode the system exit code to use
*/
private static void printHelp(Options options, int exitCode) {
private static void printHelp(Options options, int exitCode)
{
// print the help message
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MetadataExport\n", options);
@@ -183,12 +190,13 @@ public class MetadataExport {
}
/**
* main method to run the metadata exporter
*
* @param argv the command line arguments given
* @throws Exception if error occurs
*/
public static void main(String[] argv) throws Exception {
* main method to run the metadata exporter
*
* @param argv the command line arguments given
* @throws Exception if error occurs
*/
public static void main(String[] argv) throws Exception
{
// Create an options object and populate it
CommandLineParser parser = new PosixParser();
@@ -196,26 +204,30 @@ public class MetadataExport {
options.addOption("i", "id", true, "ID or handle of thing to export (item, collection, or community)");
options.addOption("f", "file", true, "destination where you want file written");
options.addOption("a", "all", false,
"include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("a", "all", false, "include all metadata fields that are not normally changed (e.g. provenance)");
options.addOption("h", "help", false, "help");
CommandLine line = null;
try {
try
{
line = parser.parse(options, argv);
} catch (ParseException pe) {
}
catch (ParseException pe)
{
System.err.println("Error with commands.");
printHelp(options, 1);
System.exit(0);
}
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
printHelp(options, 0);
}
// Check a filename is given
if (!line.hasOption('f')) {
if (!line.hasOption('f'))
{
System.err.println("Required parameter -f missing!");
printHelp(options, 1);
}
@@ -235,31 +247,42 @@ public class MetadataExport {
ContentServiceFactory contentServiceFactory = ContentServiceFactory.getInstance();
// Check we have an item OK
ItemService itemService = contentServiceFactory.getItemService();
if (!line.hasOption('i')) {
if (!line.hasOption('i'))
{
System.out.println("Exporting whole repository WARNING: May take some time!");
exporter = new MetadataExport(c, itemService.findAll(c), exportAll);
} else {
}
else
{
String handle = line.getOptionValue('i');
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, handle);
if (dso == null) {
if (dso == null)
{
System.err.println("Item '" + handle + "' does not resolve to an item in your repository!");
printHelp(options, 1);
}
if (dso.getType() == Constants.ITEM) {
if (dso.getType() == Constants.ITEM)
{
System.out.println("Exporting item '" + dso.getName() + "' (" + handle + ")");
List<Item> item = new ArrayList<>();
item.add((Item) dso);
exporter = new MetadataExport(c, item.iterator(), exportAll);
} else if (dso.getType() == Constants.COLLECTION) {
}
else if (dso.getType() == Constants.COLLECTION)
{
System.out.println("Exporting collection '" + dso.getName() + "' (" + handle + ")");
Collection collection = (Collection) dso;
Collection collection = (Collection)dso;
toExport = itemService.findByCollection(c, collection);
exporter = new MetadataExport(c, toExport, exportAll);
} else if (dso.getType() == Constants.COMMUNITY) {
}
else if (dso.getType() == Constants.COMMUNITY)
{
System.out.println("Exporting community '" + dso.getName() + "' (" + handle + ")");
exporter = new MetadataExport(c, (Community) dso, exportAll);
} else {
exporter = new MetadataExport(c, (Community)dso, exportAll);
}
else
{
System.err.println("Error identifying '" + handle + "'");
System.exit(1);
}

View File

@@ -12,23 +12,26 @@ package org.dspace.app.bulkedit;
*
* @author Stuart Lewis
*/
public class MetadataImportException extends Exception {
public class MetadataImportException extends Exception
{
/**
* Instantiate a new MetadataImportException
*
* @param message the error message
*/
public MetadataImportException(String message) {
super(message);
public MetadataImportException(String message)
{
super(message);
}
/**
* Instantiate a new MetadataImportException
*
* @param message the error message
* @param message the error message
* @param exception the root cause
*/
public MetadataImportException(String message, Exception exception) {
super(message, exception);
public MetadataImportException(String message, Exception exception)
{
super(message, exception);
}
}

View File

@@ -12,51 +12,39 @@ package org.dspace.app.bulkedit;
*
* @author Stuart Lewis
*/
public class MetadataImportInvalidHeadingException extends Exception {
/**
* The type of error (schema or element)
*/
public class MetadataImportInvalidHeadingException extends Exception
{
/** The type of error (schema or element) */
private int type;
/**
* The bad heading
*/
/** The bad heading */
private String badHeading;
/**
* The column number
*/
/** The column number */
private int column;
/**
* Error with the schema
*/
/** Error with the schema */
public static final int SCHEMA = 0;
/**
* Error with the element
*/
/** Error with the element */
public static final int ELEMENT = 1;
/**
* Error with a missing header
*/
/** Error with a missing header */
public static final int MISSING = 98;
/**
* Error with the whole entry
*/
/** Error with the whole entry */
public static final int ENTRY = 99;
/**
* Instantiate a new MetadataImportInvalidHeadingException
*
* @param message the error message
* @param theType the type of the error
* @param message the error message
* @param theType the type of the error
* @param theColumn column number
*/
public MetadataImportInvalidHeadingException(String message, int theType, int theColumn) {
public MetadataImportInvalidHeadingException(String message, int theType, int theColumn)
{
super(message);
badHeading = message;
type = theType;
@@ -66,9 +54,10 @@ public class MetadataImportInvalidHeadingException extends Exception {
/**
* Get the type of the exception
*
* @return the type of the exception
* @return the type of the exception
*/
public String getType() {
public String getType()
{
return "" + type;
}
@@ -77,7 +66,8 @@ public class MetadataImportInvalidHeadingException extends Exception {
*
* @return the invalid heading
*/
public String getBadHeader() {
public String getBadHeader()
{
return badHeading;
}
@@ -86,7 +76,8 @@ public class MetadataImportInvalidHeadingException extends Exception {
*
* @return the invalid column number
*/
public int getColumn() {
public int getColumn()
{
return column;
}
@@ -96,14 +87,19 @@ public class MetadataImportInvalidHeadingException extends Exception {
* @return The exception message
*/
@Override
public String getMessage() {
if (type == SCHEMA) {
public String getMessage()
{
if (type == SCHEMA)
{
return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT) {
} else if (type == ELEMENT)
{
return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING) {
} else if (type == MISSING)
{
return "Row with missing header: column " + column;
} else {
} else
{
return "Bad metadata declaration in column" + column + ": " + badHeading;
}
}

View File

@@ -9,11 +9,7 @@ package org.dspace.app.checker;
import java.io.FileNotFoundException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
@@ -24,15 +20,7 @@ import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.log4j.Logger;
import org.dspace.checker.BitstreamDispatcher;
import org.dspace.checker.CheckerCommand;
import org.dspace.checker.HandleDispatcher;
import org.dspace.checker.IteratorDispatcher;
import org.dspace.checker.LimitedCountDispatcher;
import org.dspace.checker.LimitedDurationDispatcher;
import org.dspace.checker.ResultsLogger;
import org.dspace.checker.ResultsPruner;
import org.dspace.checker.SimpleDispatcher;
import org.dspace.checker.*;
import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
@@ -47,7 +35,8 @@ import org.dspace.core.Utils;
* @author Grace Carpenter
* @author Nathan Sarr
*/
public final class ChecksumChecker {
public final class ChecksumChecker
{
private static final Logger LOG = Logger.getLogger(ChecksumChecker.class);
private static final BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
@@ -55,33 +44,34 @@ public final class ChecksumChecker {
/**
* Blanked off constructor, this class should be used as a command line
* tool.
*
*/
private ChecksumChecker() {
private ChecksumChecker()
{
}
/**
* Command line access to the checksum package.
*
* <dl>
* <dt>-h</dt>
* <dd>Print help on command line options</dd>
* <dt>-l</dt>
* <dd>loop through bitstreams once</dd>
* <dt>-L</dt>
* <dd>loop continuously through bitstreams</dd>
* <dt>-d</dt>
* <dd>specify duration of process run</dd>
* <dt>-b</dt>
* <dd>specify bitstream IDs</dd>
* <dt>-a [handle_id]</dt>
* <dd>check anything by handle</dd>
* <dt>-e</dt>
* <dd>Report only errors in the logs</dd>
* <dt>-p</dt>
* <dd>Don't prune results before running checker</dd>
* </dl>
*
* @param args the command line arguments given
* @param args
* <dl>
* <dt>-h</dt>
* <dd>Print help on command line options</dd>
* <dt>-l</dt>
* <dd>loop through bitstreams once</dd>
* <dt>-L</dt>
* <dd>loop continuously through bitstreams</dd>
* <dt>-d</dt>
* <dd>specify duration of process run</dd>
* <dt>-b</dt>
* <dd>specify bitstream IDs</dd>
* <dt>-a [handle_id]</dt>
* <dd>check anything by handle</dd>
* <dt>-e</dt>
* <dd>Report only errors in the logs</dd>
* <dt>-p</dt>
* <dd>Don't prune results before running checker</dd>
* </dl>
* @throws SQLException if error
*/
public static void main(String[] args) throws SQLException {
@@ -94,7 +84,7 @@ public final class ChecksumChecker {
options.addOption("l", "looping", false, "Loop once through bitstreams");
options.addOption("L", "continuous", false,
"Loop continuously through bitstreams");
"Loop continuously through bitstreams");
options.addOption("h", "help", false, "Help");
options.addOption("d", "duration", true, "Checking duration");
options.addOption("c", "count", true, "Check count");
@@ -102,28 +92,33 @@ public final class ChecksumChecker {
options.addOption("v", "verbose", false, "Report all processing");
OptionBuilder.withArgName("bitstream-ids").hasArgs().withDescription(
"Space separated list of bitstream ids");
"Space separated list of bitstream ids");
Option useBitstreamIds = OptionBuilder.create('b');
options.addOption(useBitstreamIds);
options.addOption("p", "prune", false, "Prune configuration file");
options.addOption(OptionBuilder
.withArgName("prune")
.hasOptionalArgs(1)
.withDescription(
"Prune old results (optionally using specified properties file for configuration)")
.create('p'));
options
.addOption(OptionBuilder
.withArgName("prune")
.hasOptionalArgs(1)
.withDescription(
"Prune old results (optionally using specified properties file for configuration)")
.create('p'));
try {
try
{
line = parser.parse(options, args);
} catch (ParseException e) {
}
catch (ParseException e)
{
LOG.fatal(e);
System.exit(1);
}
// user asks for help
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
printHelp(options);
}
Context context = null;
@@ -132,19 +127,23 @@ public final class ChecksumChecker {
// Prune stage
if (line.hasOption('p')) {
if (line.hasOption('p'))
{
ResultsPruner rp = null;
try {
try
{
rp = (line.getOptionValue('p') != null) ? ResultsPruner
.getPruner(context, line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner(context);
} catch (FileNotFoundException e) {
.getPruner(context, line.getOptionValue('p')) : ResultsPruner
.getDefaultPruner(context);
}
catch (FileNotFoundException e)
{
LOG.error("File not found", e);
System.exit(1);
}
int count = rp.prune();
System.out.println("Pruned " + count
+ " old results from the database.");
+ " old results from the database.");
}
Date processStart = Calendar.getInstance().getTime();
@@ -153,55 +152,77 @@ public final class ChecksumChecker {
// process should loop infinitely through
// most_recent_checksum table
if (line.hasOption('l')) {
if (line.hasOption('l'))
{
dispatcher = new SimpleDispatcher(context, processStart, false);
} else if (line.hasOption('L')) {
}
else if (line.hasOption('L'))
{
dispatcher = new SimpleDispatcher(context, processStart, true);
} else if (line.hasOption('b')) {
}
else if (line.hasOption('b'))
{
// check only specified bitstream(s)
String[] ids = line.getOptionValues('b');
List<Bitstream> bitstreams = new ArrayList<>(ids.length);
for (int i = 0; i < ids.length; i++) {
try {
for (int i = 0; i < ids.length; i++)
{
try
{
bitstreams.add(bitstreamService.find(context, UUID.fromString(ids[i])));
} catch (NumberFormatException nfe) {
}
catch (NumberFormatException nfe)
{
System.err.println("The following argument: " + ids[i]
+ " is not an integer");
+ " is not an integer");
System.exit(0);
}
}
dispatcher = new IteratorDispatcher(bitstreams.iterator());
} else if (line.hasOption('a')) {
}
else if (line.hasOption('a'))
{
dispatcher = new HandleDispatcher(context, line.getOptionValue('a'));
} else if (line.hasOption('d')) {
}
else if (line.hasOption('d'))
{
// run checker process for specified duration
try {
try
{
dispatcher = new LimitedDurationDispatcher(
new SimpleDispatcher(context, processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
} catch (Exception e) {
new SimpleDispatcher(context, processStart, true), new Date(
System.currentTimeMillis()
+ Utils.parseDuration(line
.getOptionValue('d'))));
}
catch (Exception e)
{
LOG.fatal("Couldn't parse " + line.getOptionValue('d')
+ " as a duration: ", e);
+ " as a duration: ", e);
System.exit(0);
}
} else if (line.hasOption('c')) {
}
else if (line.hasOption('c'))
{
int count = Integer.valueOf(line.getOptionValue('c'));
// run checker process for specified number of bitstreams
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), count);
} else {
context, processStart, false), count);
}
else
{
dispatcher = new LimitedCountDispatcher(new SimpleDispatcher(
context, processStart, false), 1);
context, processStart, false), 1);
}
ResultsLogger logger = new ResultsLogger(processStart);
CheckerCommand checker = new CheckerCommand(context);
// verbose reporting
if (line.hasOption('v')) {
if (line.hasOption('v'))
{
checker.setReportVerbose(true);
}
@@ -212,7 +233,7 @@ public final class ChecksumChecker {
context.complete();
context = null;
} finally {
if (context != null) {
if(context != null){
context.abort();
}
}
@@ -223,19 +244,24 @@ public final class ChecksumChecker {
*
* @param options that are available for the user
*/
private static void printHelp(Options options) {
private static void printHelp(Options options)
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Checksum Checker\n", options);
System.out.println("\nSpecify a duration for checker process, using s(seconds),"
+ "m(minutes), or h(hours): ChecksumChecker -d 30s"
+ " OR ChecksumChecker -d 30m"
+ " OR ChecksumChecker -d 2h");
System.out.println("\nSpecify bitstream IDs: ChecksumChecker -b 13 15 17 20");
System.out
.println("\nSpecify a duration for checker process, using s(seconds),"
+ "m(minutes), or h(hours): ChecksumChecker -d 30s"
+ " OR ChecksumChecker -d 30m"
+ " OR ChecksumChecker -d 2h");
System.out
.println("\nSpecify bitstream IDs: ChecksumChecker -b 13 15 17 20");
System.out.println("\nLoop once through all bitstreams: "
+ "ChecksumChecker -l");
System.out.println("\nLoop continuously through all bitstreams: ChecksumChecker -L");
System.out.println("\nCheck a defined number of bitstreams: ChecksumChecker -c 10");
+ "ChecksumChecker -l");
System.out
.println("\nLoop continuously through all bitstreams: ChecksumChecker -L");
System.out
.println("\nCheck a defined number of bitstreams: ChecksumChecker -c 10");
System.out.println("\nReport all processing (verbose)(default reports only errors): ChecksumChecker -v");
System.out.println("\nDefault (no arguments) is equivalent to '-c 1'");
System.exit(0);

View File

@@ -7,12 +7,12 @@
*/
package org.dspace.app.configuration;
import java.io.File;
import java.net.MalformedURLException;
import org.dspace.kernel.config.SpringLoader;
import org.dspace.services.ConfigurationService;
import java.io.File;
import java.net.MalformedURLException;
/**
* @author Kevin Van de Velde (kevin at atmire dot com)
*/
@@ -32,7 +32,7 @@ public class APISpringLoader implements SpringLoader {
try {
return new String[] {new File(filePath.toString()).toURI().toURL().toString() + XML_SUFFIX};
return new String[]{new File(filePath.toString()).toURI().toURL().toString() + XML_SUFFIX};
} catch (MalformedURLException e) {
return new String[0];
}

View File

@@ -7,17 +7,7 @@
*/
package org.dspace.app.harvest;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
@@ -37,21 +27,27 @@ import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
/**
* Test class for harvested collections.
* Test class for harvested collections.
*
* @author Alexey Maslov
*/
public class Harvest {
public class Harvest
{
private static Context context;
private static final HarvestedCollectionService harvestedCollectionService =
HarvestServiceFactory.getInstance().getHarvestedCollectionService();
private static final HarvestedCollectionService harvestedCollectionService = HarvestServiceFactory.getInstance().getHarvestedCollectionService();
private static final EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
private static final CollectionService collectionService =
ContentServiceFactory.getInstance().getCollectionService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
CommandLineParser parser = new PosixParser();
@@ -67,19 +63,12 @@ public class Harvest {
options.addOption("P", "purge", false, "purge all harvestable collections");
options.addOption("e", "eperson", true,
"eperson");
options.addOption("c", "collection", true,
"harvesting collection (handle or id)");
options.addOption("t", "type", true,
"type of harvesting (0 for none)");
options.addOption("a", "address", true,
"address of the OAI-PMH server");
options.addOption("i", "oai_set_id", true,
"id of the PMH set representing the harvested collection");
options.addOption("m", "metadata_format", true,
"the name of the desired metadata format for harvesting, resolved to namespace and " +
"crosswalk in dspace.cfg");
options.addOption("e", "eperson", true, "eperson");
options.addOption("c", "collection", true, "harvesting collection (handle or id)");
options.addOption("t", "type", true, "type of harvesting (0 for none)");
options.addOption("a", "address", true, "address of the OAI-PMH server");
options.addOption("i", "oai_set_id", true, "id of the PMH set representing the harvested collection");
options.addOption("m", "metadata_format", true, "the name of the desired metadata format for harvesting, resolved to namespace and crosswalk in dspace.cfg");
options.addOption("h", "help", false, "help");
@@ -93,21 +82,27 @@ public class Harvest {
String metadataKey = null;
int harvestType = 0;
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out.println(
"RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a " +
"oai_source -i oai_set_id -m metadata_format");
System.out.println(
"SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i " +
"oai_set_id -m metadata_format");
System.out.println("RUN harvest once: Harvest -r -e eperson -c collection");
System.out.println("START harvest scheduler: Harvest -S");
System.out.println("RESET all harvest status: Harvest -R");
System.out.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
System.out.println("PURGE all harvestable collections: Harvest -P -e eperson");
System.out
.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out
.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out
.println("SETUP a collection for harvesting: Harvest -s -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out
.println("RUN harvest once: Harvest -r -e eperson -c collection");
System.out
.println("START harvest scheduler: Harvest -S");
System.out
.println("RESET all harvest status: Harvest -R");
System.out
.println("PURGE a collection of items and settings: Harvest -p -e eperson -c collection");
System.out
.println("PURGE all harvestable collections: Harvest -P -e eperson");
System.exit(0);
@@ -148,7 +143,7 @@ public class Harvest {
if (line.hasOption('t')) {
harvestType = Integer.parseInt(line.getOptionValue('t'));
} else {
harvestType = 0;
harvestType = 0;
}
if (line.hasOption('a')) {
oaiSource = line.getOptionValue('a');
@@ -167,49 +162,61 @@ public class Harvest {
// Check our options
if (command == null) {
if (command == null)
{
System.out
.println("Error - no parameters specified (run with -h flag for details)");
.println("Error - no parameters specified (run with -h flag for details)");
System.exit(1);
} else if ("run".equals(command)) {
// Run a single harvest cycle on a collection using saved settings.
if (collection == null || eperson == null) {
}
// Run a single harvest cycle on a collection using saved settings.
else if ("run".equals(command))
{
if (collection == null || eperson == null)
{
System.out
.println("Error - a target collection and eperson must be provided");
.println("Error - a target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
harvester.runHarvest(collection, eperson);
} else if ("start".equals(command)) {
// start the harvest loop
startHarvester();
} else if ("reset".equals(command)) {
// reset harvesting status
resetHarvesting();
} else if ("purgeAll".equals(command)) {
// purge all collections that are set up for harvesting (obviously for testing purposes only)
if (eperson == null) {
}
// start the harvest loop
else if ("start".equals(command))
{
startHarvester();
}
// reset harvesting status
else if ("reset".equals(command))
{
resetHarvesting();
}
// purge all collections that are set up for harvesting (obviously for testing purposes only)
else if ("purgeAll".equals(command))
{
if (eperson == null)
{
System.out
.println("Error - an eperson must be provided");
.println("Error - an eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections) {
System.out.println(
"Purging the following collections (deleting items and resetting harvest status): " +
harvestedCollection
.getCollection().getID().toString());
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
System.out.println("Purging the following collections (deleting items and resetting harvest status): " + harvestedCollection.getCollection().getID().toString());
harvester.purgeCollection(harvestedCollection.getCollection().getID().toString(), eperson);
}
context.complete();
} else if ("purge".equals(command)) {
// Delete all items in a collection. Useful for testing fresh harvests.
if (collection == null || eperson == null) {
}
context.complete();
}
// Delete all items in a collection. Useful for testing fresh harvests.
else if ("purge".equals(command))
{
if (collection == null || eperson == null)
{
System.out
.println("Error - a target collection and eperson must be provided");
.println("Error - a target collection and eperson must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
@@ -218,28 +225,35 @@ public class Harvest {
context.complete();
//TODO: implement this... remove all items and remember to unset "last-harvested" settings
} else if ("config".equals(command)) {
// Configure a collection with the three main settings
if (collection == null) {
}
// Configure a collection with the three main settings
else if ("config".equals(command))
{
if (collection == null)
{
System.out.println("Error - a target collection must be provided");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
if (oaiSource == null || oaiSetID == null) {
if (oaiSource == null || oaiSetID == null)
{
System.out.println("Error - both the OAI server address and OAI set id must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
if (metadataKey == null) {
System.out
.println("Error - a metadata key (commonly the prefix) must be specified for this collection");
if (metadataKey == null)
{
System.out.println("Error - a metadata key (commonly the prefix) must be specified for this collection");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
harvester.configureCollection(collection, harvestType, oaiSource, oaiSetID, metadataKey);
} else if ("ping".equals(command)) {
if (oaiSource == null || oaiSetID == null) {
}
else if ("ping".equals(command))
{
if (oaiSource == null || oaiSetID == null)
{
System.out.println("Error - both the OAI server address and OAI set id must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
@@ -255,70 +269,81 @@ public class Harvest {
*/
private Collection resolveCollection(String collectionID) {
DSpaceObject dso;
Collection targetCollection = null;
DSpaceObject dso;
Collection targetCollection = null;
try {
// is the ID a handle?
if (collectionID != null) {
if (collectionID.indexOf('/') != -1) {
try {
// is the ID a handle?
if (collectionID != null)
{
if (collectionID.indexOf('/') != -1)
{
// string has a / so it must be a handle - try and resolve it
dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, collectionID);
// resolved, now make sure it's a collection
if (dso == null || dso.getType() != Constants.COLLECTION) {
if (dso == null || dso.getType() != Constants.COLLECTION)
{
targetCollection = null;
} else {
}
else
{
targetCollection = (Collection) dso;
}
} else {
// not a handle, try and treat it as an integer collection database ID
System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer
.parseInt(collectionID) + "', " + "in context: " + context);
}
// not a handle, try and treat it as an integer collection
// database ID
else
{
System.out.println("Looking up by id: " + collectionID + ", parsed as '" + Integer.parseInt(collectionID) + "', " + "in context: " + context);
targetCollection = collectionService.find(context, UUID.fromString(collectionID));
}
}
// was the collection valid?
if (targetCollection == null) {
if (targetCollection == null)
{
System.out.println("Cannot resolve " + collectionID + " to collection");
System.exit(1);
}
} catch (SQLException se) {
se.printStackTrace();
}
}
catch (SQLException se) {
se.printStackTrace();
}
return targetCollection;
return targetCollection;
}
private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId,
String mdConfigId) {
System.out.println("Running: configure collection");
private void configureCollection(String collectionID, int type, String oaiSource, String oaiSetId, String mdConfigId) {
System.out.println("Running: configure collection");
Collection collection = resolveCollection(collectionID);
System.out.println(collection.getID());
Collection collection = resolveCollection(collectionID);
System.out.println(collection.getID());
try {
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc == null) {
hc = harvestedCollectionService.create(context, collection);
}
try {
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc == null) {
hc = harvestedCollectionService.create(context, collection);
}
context.turnOffAuthorisationSystem();
hc.setHarvestParams(type, oaiSource, oaiSetId, mdConfigId);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
context.turnOffAuthorisationSystem();
hc.setHarvestParams(type, oaiSource, oaiSetId, mdConfigId);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, hc);
context.restoreAuthSystemState();
context.complete();
} catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
System.exit(1);
} finally {
if (context != null) {
context.restoreAuthSystemState();
context.restoreAuthSystemState();
context.complete();
}
catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
System.exit(1);
}
finally {
if (context != null)
{
context.restoreAuthSystemState();
}
}
}
}
@@ -329,47 +354,51 @@ public class Harvest {
* @param email
*/
private void purgeCollection(String collectionID, String email) {
System.out.println(
"Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
Collection collection = resolveCollection(collectionID);
System.out.println("Purging collection of all items and resetting last_harvested and harvest_message: " + collectionID);
Collection collection = resolveCollection(collectionID);
try {
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem();
try
{
EPerson eperson = ePersonService.findByEmail(context, email);
context.setCurrentUser(eperson);
context.turnOffAuthorisationSystem();
ItemService itemService = ContentServiceFactory.getInstance().getItemService();
Iterator<Item> it = itemService.findByCollection(context, collection);
int i = 0;
while (it.hasNext()) {
i++;
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
int i=0;
while (it.hasNext()) {
i++;
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);// Dispatch events every 50 items
if (i % 50 == 0) {
context.dispatchEvents();
i = 0;
}
}
context.uncacheEntity(item);
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setLastHarvested(null);
// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();
i=0;
}
}
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
if (hc != null) {
hc.setLastHarvested(null);
hc.setHarvestMessage("");
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null);
hc.setHarvestStatus(HarvestedCollection.STATUS_READY);
hc.setHarvestStartTime(null);
harvestedCollectionService.update(context, hc);
}
context.restoreAuthSystemState();
}
context.restoreAuthSystemState();
context.dispatchEvents();
} catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
System.exit(1);
} finally {
context.restoreAuthSystemState();
}
}
catch (Exception e) {
System.out.println("Changes could not be committed");
e.printStackTrace();
System.exit(1);
}
finally {
context.restoreAuthSystemState();
}
}
@@ -377,37 +406,41 @@ public class Harvest {
* Run a single harvest cycle on the specified collection under the authorization of the supplied EPerson
*/
private void runHarvest(String collectionID, String email) {
System.out.println("Running: a harvest cycle on " + collectionID);
System.out.println("Running: a harvest cycle on " + collectionID);
System.out.print("Initializing the harvester... ");
OAIHarvester harvester = null;
try {
Collection collection = resolveCollection(collectionID);
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. ");
} catch (HarvestingException hex) {
System.out.print("failed. ");
System.out.println(hex.getMessage());
throw new IllegalStateException("Unable to harvest", hex);
} catch (SQLException se) {
System.out.print("Initializing the harvester... ");
OAIHarvester harvester = null;
try {
Collection collection = resolveCollection(collectionID);
HarvestedCollection hc = harvestedCollectionService.find(context, collection);
harvester = new OAIHarvester(context, collection, hc);
System.out.println("success. ");
}
catch (HarvestingException hex) {
System.out.print("failed. ");
System.out.println(hex.getMessage());
throw new IllegalStateException("Unable to harvest", hex);
} catch (SQLException se) {
System.out.print("failed. ");
System.out.println(se.getMessage());
throw new IllegalStateException("Unable to access database", se);
}
}
try {
// Harvest will not work for an anonymous user
EPerson eperson = ePersonService.findByEmail(context, email);
System.out.println("Harvest started... ");
context.setCurrentUser(eperson);
harvester.runHarvest();
context.complete();
} catch (SQLException e) {
try {
// Harvest will not work for an anonymous user
EPerson eperson = ePersonService.findByEmail(context, email);
System.out.println("Harvest started... ");
context.setCurrentUser(eperson);
harvester.runHarvest();
context.complete();
}
catch (SQLException e) {
throw new IllegalStateException("Failed to run harvester", e);
} catch (AuthorizeException e) {
}
catch (AuthorizeException e) {
throw new IllegalStateException("Failed to run harvester", e);
} catch (IOException e) {
}
catch (IOException e) {
throw new IllegalStateException("Failed to run harvester", e);
}
@@ -415,70 +448,76 @@ public class Harvest {
}
/**
* Resets harvest_status and harvest_start_time flags for all collections that have a row in the
* harvested_collections table
* Resets harvest_status and harvest_start_time flags for all collections that have a row in the harvested_collections table
*/
private static void resetHarvesting() {
System.out.print("Resetting harvest status flag on all collections... ");
System.out.print("Resetting harvest status flag on all collections... ");
try {
try
{
List<HarvestedCollection> harvestedCollections = harvestedCollectionService.findAll(context);
for (HarvestedCollection harvestedCollection : harvestedCollections) {
for (HarvestedCollection harvestedCollection : harvestedCollections)
{
//hc.setHarvestResult(null,"");
harvestedCollection.setHarvestStartTime(null);
harvestedCollection.setHarvestStatus(HarvestedCollection.STATUS_READY);
harvestedCollectionService.update(context, harvestedCollection);
}
System.out.println("success. ");
} catch (Exception ex) {
}
catch (Exception ex) {
System.out.println("failed. ");
ex.printStackTrace();
}
}
}
/**
* Starts up the harvest scheduler. Terminating this process will stop the scheduler.
*/
private static void startHarvester() {
try {
private static void startHarvester()
{
try
{
System.out.print("Starting harvest loop... ");
HarvestServiceFactory.getInstance().getHarvestSchedulingService().startNewScheduler();
System.out.println("running. ");
} catch (Exception ex) {
}
catch (Exception ex) {
ex.printStackTrace();
}
}
}
/**
* See if the responder is alive and working.
*
* @param server address of the responder's host.
* @param set name of an item set.
* @param server address of the responder's host.
* @param set name of an item set.
* @param metadataFormat local prefix name, or null for "dc".
*/
private static void pingResponder(String server, String set, String metadataFormat) {
private static void pingResponder(String server, String set, String metadataFormat)
{
List<String> errors;
System.out.print("Testing basic PMH access: ");
errors = OAIHarvester.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", false);
if (errors.isEmpty()) {
(null != metadataFormat) ? metadataFormat : "dc", false);
if (errors.isEmpty())
System.out.println("OK");
} else {
for (String error : errors) {
else
{
for (String error : errors)
System.err.println(error);
}
}
System.out.print("Testing ORE support: ");
errors = OAIHarvester.verifyOAIharvester(server, set,
(null != metadataFormat) ? metadataFormat : "dc", true);
if (errors.isEmpty()) {
(null != metadataFormat) ? metadataFormat : "dc", true);
if (errors.isEmpty())
System.out.println("OK");
} else {
for (String error : errors) {
else
{
for (String error : errors)
System.err.println(error);
}
}
}
}

View File

@@ -7,17 +7,7 @@
*/
package org.dspace.app.itemexport;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.app.itemexport.factory.ItemExportServiceFactory;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.Collection;
@@ -30,6 +20,8 @@ import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import java.util.*;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
@@ -53,21 +45,17 @@ import org.dspace.handle.service.HandleService;
*/
public class ItemExportCLITool {
protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance()
.getItemExportService();
protected static ItemExportService itemExportService = ItemExportServiceFactory.getInstance().getItemExportService();
protected static HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
protected static ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
/**
* Default constructor
*/
private ItemExportCLITool() { }
/*
*
*/
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
// create an options object and populate it
CommandLineParser parser = new PosixParser();
@@ -76,11 +64,10 @@ public class ItemExportCLITool {
options.addOption("t", "type", true, "type: COLLECTION or ITEM");
options.addOption("i", "id", true, "ID or handle of thing to export");
options.addOption("d", "dest", true,
"destination where you want items to go");
options.addOption("m", "migrate", false,
"export for migration (remove handle and metadata that will be re-created in new system)");
"destination where you want items to go");
options.addOption("m", "migrate", false, "export for migration (remove handle and metadata that will be re-created in new system)");
options.addOption("n", "number", true,
"sequence number to begin exporting items with");
"sequence number to begin exporting items with");
options.addOption("z", "zip", true, "export as zip file (specify filename e.g. export.zip)");
options.addOption("h", "help", false, "help");
@@ -99,140 +86,175 @@ public class ItemExportCLITool {
Item myItem = null;
Collection mycollection = null;
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemExport\n", options);
System.out
.println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
.println("\nfull collection: ItemExport -t COLLECTION -i ID -d dest -n number");
System.out
.println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");
.println("singleitem: ItemExport -t ITEM -i ID -d dest -n number");
System.exit(0);
}
if (line.hasOption('t')) { // type
if (line.hasOption('t')) // type
{
typeString = line.getOptionValue('t');
if ("ITEM".equals(typeString)) {
if ("ITEM".equals(typeString))
{
myType = Constants.ITEM;
} else if ("COLLECTION".equals(typeString)) {
}
else if ("COLLECTION".equals(typeString))
{
myType = Constants.COLLECTION;
}
}
if (line.hasOption('i')) { // id
if (line.hasOption('i')) // id
{
myIDString = line.getOptionValue('i');
}
if (line.hasOption('d')) { // dest
if (line.hasOption('d')) // dest
{
destDirName = line.getOptionValue('d');
}
if (line.hasOption('n')) { // number
if (line.hasOption('n')) // number
{
seqStart = Integer.parseInt(line.getOptionValue('n'));
}
boolean migrate = false;
if (line.hasOption('m')) { // number
if (line.hasOption('m')) // number
{
migrate = true;
}
boolean zip = false;
String zipFileName = "";
if (line.hasOption('z')) {
if (line.hasOption('z'))
{
zip = true;
zipFileName = line.getOptionValue('z');
}
boolean excludeBitstreams = false;
if (line.hasOption('x')) {
excludeBitstreams = true;
if (line.hasOption('x'))
{
excludeBitstreams = true;
}
// now validate the args
if (myType == -1) {
if (myType == -1)
{
System.out
.println("type must be either COLLECTION or ITEM (-h for help)");
.println("type must be either COLLECTION or ITEM (-h for help)");
System.exit(1);
}
if (destDirName == null) {
if (destDirName == null)
{
System.out
.println("destination directory must be set (-h for help)");
.println("destination directory must be set (-h for help)");
System.exit(1);
}
if (seqStart == -1) {
if (seqStart == -1)
{
System.out
.println("sequence start number must be set (-h for help)");
.println("sequence start number must be set (-h for help)");
System.exit(1);
}
if (myIDString == null) {
if (myIDString == null)
{
System.out
.println("ID must be set to either a database ID or a handle (-h for help)");
.println("ID must be set to either a database ID or a handle (-h for help)");
System.exit(1);
}
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM) {
if (myType == Constants.ITEM)
{
// first, is myIDString a handle?
if (myIDString.indexOf('/') != -1) {
if (myIDString.indexOf('/') != -1)
{
myItem = (Item) handleService.resolveToObject(c, myIDString);
if ((myItem == null) || (myItem.getType() != Constants.ITEM)) {
if ((myItem == null) || (myItem.getType() != Constants.ITEM))
{
myItem = null;
}
} else {
}
else
{
myItem = itemService.find(c, UUID.fromString(myIDString));
}
if (myItem == null) {
if (myItem == null)
{
System.out
.println("Error, item cannot be found: " + myIDString);
.println("Error, item cannot be found: " + myIDString);
}
} else {
if (myIDString.indexOf('/') != -1) {
}
else
{
if (myIDString.indexOf('/') != -1)
{
// has a / must be a handle
mycollection = (Collection) handleService.resolveToObject(c,
myIDString);
myIDString);
// ensure it's a collection
if ((mycollection == null)
|| (mycollection.getType() != Constants.COLLECTION)) {
|| (mycollection.getType() != Constants.COLLECTION))
{
mycollection = null;
}
} else if (myIDString != null) {
}
else if (myIDString != null)
{
mycollection = collectionService.find(c, UUID.fromString(myIDString));
}
if (mycollection == null) {
if (mycollection == null)
{
System.out.println("Error, collection cannot be found: "
+ myIDString);
+ myIDString);
System.exit(1);
}
}
if (zip) {
if (zip)
{
Iterator<Item> items;
if (myItem != null) {
if (myItem != null)
{
List<Item> myItems = new ArrayList<>();
myItems.add(myItem);
items = myItems.iterator();
} else {
}
else
{
System.out.println("Exporting from collection: " + myIDString);
items = itemService.findByCollection(c, mycollection);
}
itemExportService.exportAsZip(c, items, destDirName, zipFileName, seqStart, migrate, excludeBitstreams);
} else {
if (myItem != null) {
}
else
{
if (myItem != null)
{
// it's only a single item
itemExportService
.exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate,
excludeBitstreams);
} else {
itemExportService.exportItem(c, Collections.singletonList(myItem).iterator(), destDirName, seqStart, migrate, excludeBitstreams);
}
else
{
System.out.println("Exporting from collection: " + myIDString);
// it's a collection, so do a bunch of items

View File

@@ -10,17 +10,20 @@ package org.dspace.app.itemexport;
/**
* An exception that can be thrown when error occur during item export
*/
public class ItemExportException extends Exception {
public class ItemExportException extends Exception
{
public static final int EXPORT_TOO_LARGE = 0;
private int reason;
public ItemExportException(int r, String message) {
public ItemExportException(int r, String message)
{
super(message);
reason = r;
}
public int getReason() {
public int getReason()
{
return reason;
}
}

View File

@@ -11,8 +11,7 @@ import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to
* retrieve an implementation
* Abstract factory to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
@@ -20,8 +19,7 @@ public abstract class ItemExportServiceFactory {
public abstract ItemExportService getItemExportService();
public static ItemExportServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("itemExportServiceFactory", ItemExportServiceFactory.class);
public static ItemExportServiceFactory getInstance(){
return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("itemExportServiceFactory", ItemExportServiceFactory.class);
}
}

View File

@@ -11,8 +11,7 @@ import org.dspace.app.itemexport.service.ItemExportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to
* retrieve an implementation
* Factory implementation to get services for the itemexport package, use ItemExportServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/

View File

@@ -7,17 +7,17 @@
*/
package org.dspace.app.itemexport.service;
import java.io.InputStream;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import javax.mail.MessagingException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.InputStream;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
@@ -47,109 +47,122 @@ public interface ItemExportService {
public static final String COMPRESSED_EXPORT_MIME_TYPE = "application/zip";
public void exportItem(Context c, Iterator<Item> i,
String destDirName, int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
String destDirName, int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
/**
* Method to perform an export and save it as a zip file.
*
* @param context The DSpace Context
* @param items The items to export
* @param destDirName The directory to save the export in
* @param zipFileName The name to save the zip file as
* @param seqStart The first number in the sequence
* @param migrate Whether to use the migrate option or not
* @param context The DSpace Context
* @param items The items to export
* @param destDirName The directory to save the export in
* @param zipFileName The name to save the zip file as
* @param seqStart The first number in the sequence
* @param migrate Whether to use the migrate option or not
* @param excludeBitstreams Whether to exclude bitstreams or not
* @throws Exception if error
*/
public void exportAsZip(Context context, Iterator<Item> items,
String destDirName, String zipFileName,
int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
String destDirName, String zipFileName,
int seqStart, boolean migrate,
boolean excludeBitstreams) throws Exception;
/**
* Convenience methot to create export a single Community, Collection, or
* Item
*
* @param dso - the dspace object to export
* @param context - the dspace context
* @param dso
* - the dspace object to export
* @param context
* - the dspace context
* @param migrate Whether to use the migrate option or not
* @throws Exception if error
*/
public void createDownloadableExport(DSpaceObject dso,
Context context, boolean migrate) throws Exception;
Context context, boolean migrate) throws Exception;
/**
* Convenience method to export a List of dspace objects (Community,
* Collection or Item)
*
* @param dsObjects - List containing dspace objects
* @param context - the dspace context
* @param migrate Whether to use the migrate option or not
* @param dsObjects
* - List containing dspace objects
* @param context
* - the dspace context
* @param migrate Whether to use the migrate option or not
* @throws Exception if error
*/
public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, boolean migrate) throws Exception;
Context context, boolean migrate) throws Exception;
/**
* Convenience methot to create export a single Community, Collection, or
* Item
*
* @param dso - the dspace object to export
* @param context - the dspace context
* @param additionalEmail - cc email to use
* @param migrate Whether to use the migrate option or not
* @param dso
* - the dspace object to export
* @param context
* - the dspace context
* @param additionalEmail
* - cc email to use
* @param migrate Whether to use the migrate option or not
* @throws Exception if error
*/
public void createDownloadableExport(DSpaceObject dso,
Context context, String additionalEmail, boolean migrate) throws Exception;
Context context, String additionalEmail, boolean migrate) throws Exception;
/**
* Convenience method to export a List of dspace objects (Community,
* Collection or Item)
*
* @param dsObjects - List containing dspace objects
* @param context - the dspace context
* @param additionalEmail - cc email to use
* @param migrate Whether to use the migrate option or not
* @param dsObjects
* - List containing dspace objects
* @param context
* - the dspace context
* @param additionalEmail
* - cc email to use
* @param migrate Whether to use the migrate option or not
* @throws Exception if error
*/
public void createDownloadableExport(List<DSpaceObject> dsObjects,
Context context, String additionalEmail, boolean migrate) throws Exception;
Context context, String additionalEmail, boolean migrate) throws Exception;
/**
* Create a file name based on the date and eperson
*
* @param type Type of object (as string)
* @param eperson - eperson who requested export and will be able to download it
* @param date - the date the export process was created
* @param type Type of object (as string)
* @param eperson
* - eperson who requested export and will be able to download it
* @param date
* - the date the export process was created
* @return String representing the file name in the form of
* 'export_yyy_MMM_dd_count_epersonID'
* 'export_yyy_MMM_dd_count_epersonID'
* @throws Exception if error
*/
public String assembleFileName(String type, EPerson eperson,
Date date) throws Exception;
Date date) throws Exception;
/**
* Use config file entry for org.dspace.app.itemexport.download.dir and id
* of the eperson to create a download directory name
*
* @param ePerson - the eperson who requested export archive
* @param ePerson
* - the eperson who requested export archive
* @return String representing a directory in the form of
* org.dspace.app.itemexport.download.dir/epersonID
* org.dspace.app.itemexport.download.dir/epersonID
* @throws Exception if error
*/
public String getExportDownloadDirectory(EPerson ePerson)
throws Exception;
throws Exception;
/**
* Returns config file entry for org.dspace.app.itemexport.work.dir
*
* @return String representing config file entry for
* org.dspace.app.itemexport.work.dir
* org.dspace.app.itemexport.work.dir
* @throws Exception if error
*/
public String getExportWorkDirectory() throws Exception;
@@ -157,43 +170,49 @@ public interface ItemExportService {
/**
* Used to read the export archived. Inteded for download.
*
* @param fileName the name of the file to download
* @param eperson the eperson requesting the download
* @param fileName
* the name of the file to download
* @param eperson
* the eperson requesting the download
* @return an input stream of the file to be downloaded
* @throws Exception if error
*/
public InputStream getExportDownloadInputStream(String fileName,
EPerson eperson) throws Exception;
EPerson eperson) throws Exception;
/**
* Get the file size of the export archive represented by the file name.
*
* @param context DSpace context
* @param fileName name of the file to get the size.
* @return size as long
* @param context DSpace context
* @param fileName
* name of the file to get the size.
* @throws Exception if error
* @return size as long
*/
public long getExportFileSize(Context context, String fileName) throws Exception;
/**
* Get the last modified date of the export archive represented by the file name.
*
* @param context DSpace context
* @param fileName name of the file to get the size.
* @param context DSpace context
* @param fileName
* name of the file to get the size.
* @return date as long
* @throws Exception if error
* @see java.io.File#lastModified()
* @throws Exception if error
*/
public long getExportFileLastModified(Context context, String fileName)
throws Exception;
throws Exception;
/**
* The file name of the export archive contains the eperson id of the person
* who created it When requested for download this method can check if the
* person requesting it is the same one that created it
*
* @param context dspace context
* @param fileName the file name to check auths for
* @param context
* dspace context
* @param fileName
* the file name to check auths for
* @return true if it is the same person false otherwise
*/
public boolean canDownload(Context context, String fileName);
@@ -204,18 +223,19 @@ public interface ItemExportService {
*
* @param eperson EPerson object
* @return a list of file names representing export archives that have been
* processed
* processed
* @throws Exception if error
*/
public List<String> getExportsAvailable(EPerson eperson)
throws Exception;
throws Exception;
/**
* A clean up method that is ran before a new export archive is created. It
* uses the config file entry 'org.dspace.app.itemexport.life.span.hours' to
* determine if the current exports are too old and need pruging
*
* @param eperson - the eperson to clean up
* @param eperson
* - the eperson to clean up
* @throws Exception if error
*/
public void deleteOldExportArchives(EPerson eperson) throws Exception;
@@ -236,14 +256,17 @@ public interface ItemExportService {
* communication with email instead. Send a success email once the export
* archive is complete and ready for download
*
* @param context - the current Context
* @param eperson - eperson to send the email to
* @param fileName - the file name to be downloaded. It is added to the url in
* the email
* @param context
* - the current Context
* @param eperson
* - eperson to send the email to
* @param fileName
* - the file name to be downloaded. It is added to the url in
* the email
* @throws MessagingException if error
*/
public void emailSuccessMessage(Context context, EPerson eperson,
String fileName) throws MessagingException;
String fileName) throws MessagingException;
/**
* Since the archive is created in a new thread we are unable to communicate
@@ -251,18 +274,19 @@ public interface ItemExportService {
* communication with email instead. Send an error email if the export
* archive fails
*
* @param eperson - EPerson to send the error message to
* @param error - the error message
* @param eperson
* - EPerson to send the error message to
* @param error
* - the error message
* @throws MessagingException if error
*/
public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException;
throws MessagingException;
/**
* Zip source to target
*
* @param strSource source file
* @param target target file
* @param target target file
* @throws Exception if error
*/
public void zip(String strSource, String target) throws Exception;

View File

@@ -7,100 +7,99 @@
*/
package org.dspace.app.itemimport;
import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import gr.ekt.bte.core.DataLoader;
import gr.ekt.bte.core.TransformationEngine;
import gr.ekt.bte.dataloader.FileDataLoader;
/**
* This class acts as a Service in the procedure to batch import using the Biblio-Transformation-Engine
*/
public class BTEBatchImportService {
public class BTEBatchImportService
{
TransformationEngine transformationEngine;
TransformationEngine transformationEngine;
Map<String, DataLoader> dataLoaders = new HashMap<String, DataLoader>();
Map<String, String> outputMap = new HashMap<String, String>();
Map<String, String> outputMap = new HashMap<String,String>();
/**
* Default constructor
*/
public BTEBatchImportService() {
public BTEBatchImportService()
{
super();
}
/**
* Setter method for dataLoaders parameter
*
* @param dataLoaders map of data loaders
*/
public void setDataLoaders(Map<String, DataLoader> dataLoaders) {
public void setDataLoaders(Map<String, DataLoader> dataLoaders)
{
this.dataLoaders = dataLoaders;
}
/**
* Get data loaders
*
* @return the map of DataLoaders
*/
public Map<String, DataLoader> getDataLoaders() {
public Map<String, DataLoader> getDataLoaders()
{
return dataLoaders;
}
/**
* Get output map
*
* @return the outputMapping
*/
public Map<String, String> getOutputMap() {
return outputMap;
}
public Map<String, String> getOutputMap() {
return outputMap;
}
/**
* Setter method for the outputMapping
*
* @param outputMap the output mapping
*/
public void setOutputMap(Map<String, String> outputMap) {
this.outputMap = outputMap;
}
/**
* Setter method for the outputMapping
* @param outputMap the output mapping
*/
public void setOutputMap(Map<String, String> outputMap) {
this.outputMap = outputMap;
}
/**
* Get transformation engine
*
* @return transformation engine
*/
public TransformationEngine getTransformationEngine() {
return transformationEngine;
}
/**
* Get transformation engine
* @return transformation engine
*/
public TransformationEngine getTransformationEngine() {
return transformationEngine;
}
/**
* set transformation engine
*
* @param transformationEngine transformation engine
*/
public void setTransformationEngine(TransformationEngine transformationEngine) {
this.transformationEngine = transformationEngine;
}
/**
* set transformation engine
* @param transformationEngine transformation engine
*/
public void setTransformationEngine(TransformationEngine transformationEngine) {
this.transformationEngine = transformationEngine;
}
/**
* Getter of file data loaders
*
* @return List of file data loaders
*/
public List<String> getFileDataLoaders() {
List<String> result = new ArrayList<String>();
/**
* Getter of file data loaders
* @return List of file data loaders
*/
public List<String> getFileDataLoaders(){
List<String> result = new ArrayList<String>();
for (String key : dataLoaders.keySet()) {
DataLoader dl = dataLoaders.get(key);
if (dl instanceof FileDataLoader) {
result.add(key);
}
}
return result;
}
for (String key : dataLoaders.keySet()){
DataLoader dl = dataLoaders.get(key);
if (dl instanceof FileDataLoader){
result.add(key);
}
}
return result;
}
}

View File

@@ -20,210 +20,198 @@ import java.util.List;
/**
* @author kstamatis
*
*/
public class BatchUpload {
private Date date;
private File dir;
private boolean successful;
private int itemsImported;
private int totalItems = 0;
private List<String> handlesImported = new ArrayList<String>();
private String errorMsg = "";
private String errorMsgHTML = "";
private Date date;
private File dir;
private boolean successful;
private int itemsImported;
private int totalItems = 0;
private List<String> handlesImported = new ArrayList<String>();
private String errorMsg = "";
private String errorMsgHTML = "";
/**
* Initialize with directory
*
* @param dirPath directory path
*/
public BatchUpload(String dirPath) {
/**
* Initialize with directory
* @param dirPath directory path
*/
public BatchUpload(String dirPath) {
this.initializeWithFile(new File(dirPath));
this.initializeWithFile(new File(dirPath));
}
}
/**
* Initialize with directory
*
* @param dir directory path
*/
public BatchUpload(File dir) {
/**
* Initialize with directory
* @param dir directory path
*/
public BatchUpload(File dir) {
this.initializeWithFile(dir);
this.initializeWithFile(dir);
}
}
/**
* Initialize with directory
*
* @param dir directory path
*/
private void initializeWithFile(File dir) {
/**
* Initialize with directory
* @param dir directory path
*/
private void initializeWithFile(File dir){
this.dir = dir;
this.dir = dir;
String dirName = dir.getName();
long timeMillis = Long.parseLong(dirName);
Calendar calendar = new GregorianCalendar();
calendar.setTimeInMillis(timeMillis);
this.date = calendar.getTime();
String dirName = dir.getName();
long timeMillis = Long.parseLong(dirName);
Calendar calendar = new GregorianCalendar();
calendar.setTimeInMillis(timeMillis);
this.date = calendar.getTime();
try {
this.itemsImported = countLines(dir + File.separator + "mapfile");
} catch (IOException e) {
e.printStackTrace();
}
try {
this.itemsImported = countLines(dir + File.separator + "mapfile");
} catch (IOException e) {
e.printStackTrace();
}
for (File file : dir.listFiles()) {
if (file.isDirectory()) {
this.totalItems = file.list().length;
}
}
for (File file : dir.listFiles()){
if (file.isDirectory()){
this.totalItems = file.list().length;
}
}
this.successful = this.totalItems == this.itemsImported;
this.successful = this.totalItems == this.itemsImported;
//Parse possible error message
//Parse possible error message
File errorFile = new File(dir + File.separator + "error.txt");
if (errorFile.exists()) {
try {
readFile(dir + File.separator + "error.txt");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
File errorFile = new File(dir + File.separator + "error.txt");
if (errorFile.exists()){
try {
readFile(dir + File.separator + "error.txt");
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
/**
* Count lines in file
*
* @param filename file name
* @return lines in file
* @throws IOException if IO error
*/
private int countLines(String filename) throws IOException {
LineNumberReader reader = new LineNumberReader(new FileReader(filename));
int cnt = 0;
String lineRead = "";
while ((lineRead = reader.readLine()) != null) {
String[] parts = lineRead.split(" ");
if (parts.length > 1) {
handlesImported.add(parts[1].trim());
} else {
handlesImported.add(lineRead);
}
}
/**
* Count lines in file
* @param filename file name
* @return lines in file
* @throws IOException if IO error
*/
private int countLines(String filename) throws IOException {
LineNumberReader reader = new LineNumberReader(new FileReader(filename));
int cnt = 0;
String lineRead = "";
while ((lineRead = reader.readLine()) != null) {
String[] parts = lineRead.split(" ");
if (parts.length > 1)
handlesImported.add(parts[1].trim());
else
handlesImported.add(lineRead);
}
cnt = reader.getLineNumber();
reader.close();
return cnt;
}
cnt = reader.getLineNumber();
reader.close();
return cnt;
}
/**
* Read a file
*
* @param filename file name
* @throws IOException if IO error
*/
private void readFile(String filename) throws IOException {
LineNumberReader reader = new LineNumberReader(new FileReader(filename));
String lineRead = "";
while ((lineRead = reader.readLine()) != null) {
this.errorMsg += lineRead + "\n";
/**
* Read a file
* @param filename file name
* @throws IOException if IO error
*/
private void readFile(String filename) throws IOException {
LineNumberReader reader = new LineNumberReader(new FileReader(filename));
String lineRead = "";
while ((lineRead = reader.readLine()) != null) {
this.errorMsg += lineRead + "\n";
if (lineRead.startsWith("\tat ")) {
this.errorMsgHTML += "<span class=\"batchimport-error-tab\">" + lineRead + "</span><br/>";
} else if (lineRead.startsWith("Caused by")) {
this.errorMsgHTML += "<span class=\"batchimport-error-caused\">" + lineRead + "</span><br/>";
} else {
this.errorMsgHTML += lineRead + "<br/>";
}
}
reader.close();
}
if (lineRead.startsWith("\tat ")){
this.errorMsgHTML += "<span class=\"batchimport-error-tab\">" + lineRead + "</span><br/>";
}
else if (lineRead.startsWith("Caused by")){
this.errorMsgHTML += "<span class=\"batchimport-error-caused\">" + lineRead + "</span><br/>";
}
else {
this.errorMsgHTML += lineRead + "<br/>";
}
}
reader.close();
}
/**
* Get date
*
* @return Date
*/
public Date getDate() {
return date;
}
/**
* Get date
* @return Date
*/
public Date getDate() {
return date;
}
/**
* Get path to directory
*
* @return directory
*/
public File getDir() {
return dir;
}
/**
* Get path to directory
* @return directory
*/
public File getDir() {
return dir;
}
/**
* Whether successulf
*
* @return true or false
*/
public boolean isSuccessful() {
return successful;
}
/**
* Whether successulf
* @return true or false
*/
public boolean isSuccessful() {
return successful;
}
/**
* Get items imported
*
* @return number of items
*/
public int getItemsImported() {
return itemsImported;
}
/**
* Get items imported
* @return number of items
*/
public int getItemsImported() {
return itemsImported;
}
/**
* Get total items
*
* @return total
*/
public int getTotalItems() {
return totalItems;
}
/**
* Get total items
* @return total
*/
public int getTotalItems() {
return totalItems;
}
/**
* Get formatted date (DD/MM/YY)
*
* @return date as string
*/
public String getDateFormatted() {
SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy - HH:mm");
/**
* Get formatted date (DD/MM/YY)
* @return date as string
*/
public String getDateFormatted(){
SimpleDateFormat df = new SimpleDateFormat("dd/MM/yyyy - HH:mm");
return df.format(date);
}
return df.format(date);
}
/**
* Get handles of imported files
*
* @return list of handles
*/
public List<String> getHandlesImported() {
return handlesImported;
}
/**
* Get handles of imported files
* @return list of handles
*/
public List<String> getHandlesImported() {
return handlesImported;
}
/**
* Get error message
*
* @return error message
*/
public String getErrorMsg() {
return errorMsg;
}
/**
* Get error message
* @return error message
*/
public String getErrorMsg() {
return errorMsg;
}
/**
* Get error message as HTML
*
* @return error message string as HTML
*/
public String getErrorMsgHTML() {
return errorMsgHTML;
}
/**
* Get error message as HTML
* @return error message string as HTML
*/
public String getErrorMsgHTML() {
return errorMsgHTML;
}
}

View File

@@ -7,17 +7,7 @@
*/
package org.dspace.app.itemimport;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.app.itemimport.factory.ItemImportServiceFactory;
import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.content.Collection;
@@ -31,6 +21,12 @@ import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
@@ -51,17 +47,12 @@ public class ItemImportCLITool {
private static boolean template = false;
private static final CollectionService collectionService = ContentServiceFactory.getInstance()
.getCollectionService();
private static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
private static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
private static final HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
/**
* Default constructor
*/
private ItemImportCLITool() { }
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
Date startTime = new Date();
int status = 0;
@@ -75,24 +66,24 @@ public class ItemImportCLITool {
options.addOption("b", "add-bte", false, "add items to DSpace via Biblio-Transformation-Engine (BTE)");
options.addOption("r", "replace", false, "replace items in mapfile");
options.addOption("d", "delete", false,
"delete items listed in mapfile");
"delete items listed in mapfile");
options.addOption("i", "inputtype", true, "input type in case of BTE import");
options.addOption("s", "source", true, "source of items (directory)");
options.addOption("z", "zip", true, "name of zip file");
options.addOption("c", "collection", true,
"destination collection(s) Handle or database ID");
"destination collection(s) Handle or database ID");
options.addOption("m", "mapfile", true, "mapfile items in mapfile");
options.addOption("e", "eperson", true,
"email of eperson doing importing");
"email of eperson doing importing");
options.addOption("w", "workflow", false,
"send submission through collection's workflow");
"send submission through collection's workflow");
options.addOption("n", "notify", false,
"if sending submissions through the workflow, send notification emails");
"if sending submissions through the workflow, send notification emails");
options.addOption("t", "test", false,
"test run - do not actually import items");
"test run - do not actually import items");
options.addOption("p", "template", false, "apply template");
options.addOption("R", "resume", false,
"resume a failed import (add only)");
"resume a failed import (add only)");
options.addOption("q", "quiet", false, "don't display metadata");
options.addOption("h", "help", false, "help");
@@ -115,19 +106,15 @@ public class ItemImportCLITool {
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemImport\n", options);
System.out
.println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
.println("\nadding items: ItemImport -a -e eperson -c collection -s sourcedir -m mapfile");
System.out
.println(
"\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z " +
"filename.zip -m mapfile");
.println("\nadding items from zip file: ItemImport -a -e eperson -c collection -s sourcedir -z filename.zip -m mapfile");
System.out
.println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
.println("replacing items: ItemImport -r -e eperson -c collection -s sourcedir -m mapfile");
System.out
.println("deleting items: ItemImport -d -e eperson -m mapfile");
.println("deleting items: ItemImport -d -e eperson -m mapfile");
System.out
.println(
"If multiple collections are specified, the first collection will be the one that owns the " +
"item.");
.println("If multiple collections are specified, the first collection will be the one that owns the item.");
System.exit(0);
}
@@ -168,26 +155,30 @@ public class ItemImportCLITool {
template = true;
}
if (line.hasOption('s')) { // source
if (line.hasOption('s')) // source
{
sourcedir = line.getOptionValue('s');
}
if (line.hasOption('m')) { // mapfile
if (line.hasOption('m')) // mapfile
{
mapfile = line.getOptionValue('m');
}
if (line.hasOption('e')) { // eperson
if (line.hasOption('e')) // eperson
{
eperson = line.getOptionValue('e');
}
if (line.hasOption('c')) { // collections
if (line.hasOption('c')) // collections
{
collections = line.getOptionValues('c');
}
if (line.hasOption('R')) {
isResume = true;
System.out
.println("**Resume import** - attempting to import items not already imported");
.println("**Resume import** - attempting to import items not already imported");
}
if (line.hasOption('q')) {
@@ -207,26 +198,26 @@ public class ItemImportCLITool {
// must have a command set
if (command == null) {
System.out
.println("Error - must run with either add, replace, or remove (run with -h flag for details)");
.println("Error - must run with either add, replace, or remove (run with -h flag for details)");
System.exit(1);
} else if ("add".equals(command) || "replace".equals(command)) {
if (sourcedir == null) {
System.out
.println("Error - a source directory containing items must be set");
.println("Error - a source directory containing items must be set");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
if (mapfile == null) {
System.out
.println("Error - a map file to hold importing results must be specified");
.println("Error - a map file to hold importing results must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
if (eperson == null) {
System.out
.println("Error - an eperson to do the importing must be specified");
.println("Error - an eperson to do the importing must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
@@ -236,19 +227,18 @@ public class ItemImportCLITool {
commandLineCollections = false;
}
} else if ("add-bte".equals(command)) {
//Source dir can be null, the user can specify the parameters for his loader in the Spring XML
// configuration file
//Source dir can be null, the user can specify the parameters for his loader in the Spring XML configuration file
if (mapfile == null) {
System.out
.println("Error - a map file to hold importing results must be specified");
.println("Error - a map file to hold importing results must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
if (eperson == null) {
System.out
.println("Error - an eperson to do the importing must be specified");
.println("Error - an eperson to do the importing must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
@@ -260,16 +250,14 @@ public class ItemImportCLITool {
if (bteInputType == null) {
System.out
.println(
"Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have " +
"specified in BTE Spring XML configuration file) must be specified");
.println("Error - an input type (tsv, csv, ris, endnote, bibtex or any other type you have specified in BTE Spring XML configuration file) must be specified");
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
} else if ("delete".equals(command)) {
if (eperson == null) {
System.out
.println("Error - an eperson to do the importing must be specified");
.println("Error - an eperson to do the importing must be specified");
System.exit(1);
}
@@ -282,7 +270,7 @@ public class ItemImportCLITool {
// can only resume for adds
if (isResume && !"add".equals(command) && !"add-bte".equals(command)) {
System.out
.println("Error - resume option only works with the --add or the --add-bte commands");
.println("Error - resume option only works with the --add or the --add-bte commands");
System.exit(1);
}
@@ -292,9 +280,9 @@ public class ItemImportCLITool {
if (!isResume && "add".equals(command) && myFile.exists()) {
System.out.println("Error - the mapfile " + mapfile
+ " already exists.");
+ " already exists.");
System.out
.println("Either delete it or use --resume if attempting to resume an aborted import.");
.println("Either delete it or use --resume if attempting to resume an aborted import.");
System.exit(1);
}
@@ -342,22 +330,24 @@ public class ItemImportCLITool {
// string has a / so it must be a handle - try and resolve
// it
mycollections.add((Collection) handleService
.resolveToObject(c, collections[i]));
.resolveToObject(c, collections[i]));
// resolved, now make sure it's a collection
if ((mycollections.get(i) == null)
|| (mycollections.get(i).getType() != Constants.COLLECTION)) {
|| (mycollections.get(i).getType() != Constants.COLLECTION)) {
mycollections.set(i, null);
}
} else if (collections[i] != null) {
// not a handle, try and treat it as an integer collection database ID
}
// not a handle, try and treat it as an integer collection
// database ID
else if (collections[i] != null) {
mycollections.set(i, collectionService.find(c, UUID.fromString(collections[i])));
}
// was the collection valid?
if (mycollections.get(i) == null) {
throw new IllegalArgumentException("Cannot resolve "
+ collections[i] + " to collection");
+ collections[i] + " to collection");
}
// print progress info
@@ -368,7 +358,7 @@ public class ItemImportCLITool {
}
System.out.println(owningPrefix + " Collection: "
+ mycollections.get(i).getName());
+ mycollections.get(i).getName());
}
} // end of validating collections
@@ -404,13 +394,11 @@ public class ItemImportCLITool {
try {
if (zip) {
System.gc();
System.out.println(
"Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
System.out.println("Deleting temporary zip directory: " + myloader.getTempWorkDirFile().getAbsolutePath());
myloader.cleanupZipTemp();
}
} catch (Exception ex) {
System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile()
.getAbsolutePath());
System.out.println("Unable to delete temporary zip archive location: " + myloader.getTempWorkDirFile().getAbsolutePath());
}
@@ -421,9 +409,7 @@ public class ItemImportCLITool {
Date endTime = new Date();
System.out.println("Started: " + startTime.getTime());
System.out.println("Ended: " + endTime.getTime());
System.out.println(
"Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime
.getTime() - startTime.getTime()) + " msecs)");
System.out.println("Elapsed time: " + ((endTime.getTime() - startTime.getTime()) / 1000) + " secs (" + (endTime.getTime() - startTime.getTime()) + " msecs)");
}
System.exit(status);

View File

@@ -11,8 +11,7 @@ import org.dspace.app.itemimport.service.ItemImportService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the itemimport package, use ItemImportService.getInstance() to retrieve an
* implementation
* Abstract factory to get services for the itemimport package, use ItemImportService.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
@@ -20,8 +19,7 @@ public abstract class ItemImportServiceFactory {
public abstract ItemImportService getItemImportService();
public static ItemImportServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("itemImportServiceFactory", ItemImportServiceFactory.class);
public static ItemImportServiceFactory getInstance(){
return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("itemImportServiceFactory", ItemImportServiceFactory.class);
}
}

View File

@@ -11,8 +11,7 @@ import org.dspace.app.itemimport.service.ItemImportService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the itemimport package, use ItemImportService.getInstance() to retrieve
* an implementation
* Factory implementation to get services for the itemimport package, use ItemImportService.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/

View File

@@ -7,16 +7,16 @@
*/
package org.dspace.app.itemimport.service;
import java.io.File;
import java.io.IOException;
import java.util.List;
import javax.mail.MessagingException;
import org.dspace.app.itemimport.BatchUpload;
import org.dspace.content.Collection;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import javax.mail.MessagingException;
import java.io.File;
import java.io.IOException;
import java.util.List;
/**
* Import items into DSpace. The conventional use is upload files by copying
* them. DSpace writes the item's bitstreams into its assetstore. Metadata is
@@ -37,32 +37,30 @@ public interface ItemImportService {
/**
* @param c DSpace Context
*
* @param c DSpace Context
* @param mycollections List of Collections
* @param sourceDir source location
* @param mapFile map file
* @param template whether to use template item
* @param sourceDir source location
* @param mapFile map file
* @param template whether to use template item
* @throws Exception if error
*/
public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile,
boolean template) throws Exception;
public void addItemsAtomic(Context c, List<Collection> mycollections, String sourceDir, String mapFile, boolean template) throws Exception;
/**
* Add items
*
* @param c DSpace Context
* @param c DSpace Context
* @param mycollections List of Collections
* @param sourceDir source location
* @param mapFile map file
* @param template whether to use template item
* @param sourceDir source location
* @param mapFile map file
* @param template whether to use template item
* @throws Exception if error
*/
public void addItems(Context c, List<Collection> mycollections,
String sourceDir, String mapFile, boolean template) throws Exception;
String sourceDir, String mapFile, boolean template) throws Exception;
/**
* Unzip a file
*
* @param zipfile file
* @return unzip location
* @throws IOException if error
@@ -71,7 +69,6 @@ public interface ItemImportService {
/**
* Unzip a file to a destination
*
* @param zipfile file
* @param destDir destination directory
* @return unzip location
@@ -81,8 +78,7 @@ public interface ItemImportService {
/**
* Unzip a file in a specific source directory
*
* @param sourcedir source directory
* @param sourcedir source directory
* @param zipfilename file name
* @return unzip location
* @throws IOException if error
@@ -90,19 +86,18 @@ public interface ItemImportService {
public String unzip(String sourcedir, String zipfilename) throws IOException;
/**
* Given a public URL to a zip file that has the Simple Archive Format, this method imports the contents to DSpace
*
* @param url The public URL of the zip file
* Given a public URL to a zip file that has the Simple Archive Format, this method imports the contents to DSpace
* @param url The public URL of the zip file
* @param owningCollection The owning collection the items will belong to
* @param collections The collections the created items will be inserted to, apart from the owning one
* @param resumeDir In case of a resume request, the directory that containsthe old mapfile and data
* @param inputType The input type of the data (bibtex, csv, etc.), in case of local file
* @param context The context
* @param template whether to use template item
* @param collections The collections the created items will be inserted to, apart from the owning one
* @param resumeDir In case of a resume request, the directory that containsthe old mapfile and data
* @param inputType The input type of the data (bibtex, csv, etc.), in case of local file
* @param context The context
* @param template whether to use template item
* @throws Exception if error
*/
public void processUIImport(String url, Collection owningCollection, String[] collections, String resumeDir,
String inputType, Context context, boolean template) throws Exception;
public void processUIImport(String url, Collection owningCollection, String[] collections, String resumeDir, String inputType, Context context, boolean template) throws Exception;
/**
* Since the BTE batch import is done in a new thread we are unable to communicate
@@ -110,13 +105,16 @@ public interface ItemImportService {
* communication with email instead. Send a success email once the batch
* import is complete
*
* @param context - the current Context
* @param eperson - eperson to send the email to
* @param fileName - the filepath to the mapfile created by the batch import
* @param context
* - the current Context
* @param eperson
* - eperson to send the email to
* @param fileName
* - the filepath to the mapfile created by the batch import
* @throws MessagingException if error
*/
public void emailSuccessMessage(Context context, EPerson eperson,
String fileName) throws MessagingException;
String fileName) throws MessagingException;
/**
* Since the BTE batch import is done in a new thread we are unable to communicate
@@ -124,38 +122,37 @@ public interface ItemImportService {
* communication with email instead. Send an error email if the batch
* import fails
*
* @param eperson - EPerson to send the error message to
* @param error - the error message
* @param eperson
* - EPerson to send the error message to
* @param error
* - the error message
* @throws MessagingException if error
*/
public void emailErrorMessage(EPerson eperson, String error)
throws MessagingException;
throws MessagingException;
/**
* Get imports available for a person
*
* @param eperson EPerson object
* @return List of batch uploads
* @throws Exception if error
*/
public List<BatchUpload> getImportsAvailable(EPerson eperson)
throws Exception;
throws Exception;
/**
* Get import upload directory
*
* @param ePerson EPerson object
* @return directory
* @throws Exception if error
*/
public String getImportUploadableDirectory(EPerson ePerson)
throws Exception;
throws Exception;
/**
* Delete a batch by ID
*
* @param c DSpace Context
* @param c DSpace Context
* @param uploadId identifier
* @throws Exception if error
*/
@@ -163,21 +160,18 @@ public interface ItemImportService {
/**
* Replace items
*
* @param c DSpace Context
* @param c DSpace Context
* @param mycollections List of Collections
* @param sourcedir source directory
* @param mapfile map file
* @param template whether to use template item
* @param sourcedir source directory
* @param mapfile map file
* @param template whether to use template item
* @throws Exception if error
*/
public void replaceItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
boolean template) throws Exception;
public void replaceItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template) throws Exception;
/**
* Delete items via mapfile
*
* @param c DSpace Context
* @param c DSpace Context
* @param mapfile map file
* @throws Exception if error
*/
@@ -185,33 +179,28 @@ public interface ItemImportService {
/**
* Add items
*
* @param c DSpace Context
* @param c DSpace Context
* @param mycollections List of Collections
* @param sourcedir source directory
* @param mapfile map file
* @param template whether to use template item
* @param bteInputType The input type of the data (bibtex, csv, etc.), in case of local file
* @param workingDir working directory
* @param sourcedir source directory
* @param mapfile map file
* @param template whether to use template item
* @param bteInputType The input type of the data (bibtex, csv, etc.), in case of local file
* @param workingDir working directory
* @throws Exception if error
*/
public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile,
boolean template, String bteInputType, String workingDir) throws Exception;
public void addBTEItems(Context c, List<Collection> mycollections, String sourcedir, String mapfile, boolean template, String bteInputType, String workingDir) throws Exception;
/**
* Get temporary work directory
*
* @return directory as string
*/
public String getTempWorkDir();
/**
* Get temporary work directory (as File)
*
* @return directory as File
* @throws java.io.IOException if the directory cannot be created.
*/
public File getTempWorkDirFile() throws IOException;
public File getTempWorkDirFile();
/**
* Cleanup
@@ -220,21 +209,18 @@ public interface ItemImportService {
/**
* Set test flag
*
* @param isTest true or false
*/
public void setTest(boolean isTest);
/**
* Set resume flag
*
* @param isResume true or false
*/
public void setResume(boolean isResume);
/**
* Set use workflow
*
* @param useWorkflow whether to enable workflow
*/
public void setUseWorkflow(boolean useWorkflow);
@@ -246,7 +232,6 @@ public interface ItemImportService {
/**
* Set quiet flag
*
* @param isQuiet true or false
*/
public void setQuiet(boolean isQuiet);

View File

@@ -25,37 +25,40 @@ import org.springframework.beans.factory.annotation.Autowired;
* based on the existence of bitstreams within the ORIGINAL bundle.
*
* @author Kostas Stamatis
*
*/
public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtractor {
private String availableImageName;
private String nonAvailableImageName;
private String availableImageName;
private String nonAvailableImageName;
@Autowired(required = true)
protected ItemService itemService;
public ItemMarkingAvailabilityBitstreamStrategy() {
public ItemMarkingAvailabilityBitstreamStrategy() {
}
}
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
if (bundles.size() == 0) {
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
List<Bundle> bundles = itemService.getBundles(item, "ORIGINAL");
if (bundles.size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
} else {
Bundle originalBundle = bundles.iterator().next();
if (originalBundle.getBitstreams().size() == 0) {
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
}
else {
Bundle originalBundle = bundles.iterator().next();
if (originalBundle.getBitstreams().size() == 0){
ItemMarkingInfo markInfo = new ItemMarkingInfo();
markInfo.setImageName(nonAvailableImageName);
return markInfo;
} else {
return markInfo;
}
else {
Bitstream bitstream = originalBundle.getBitstreams().get(0);
ItemMarkingInfo signInfo = new ItemMarkingInfo();
@@ -63,31 +66,32 @@ public class ItemMarkingAvailabilityBitstreamStrategy implements ItemMarkingExtr
signInfo.setTooltip(bitstream.getName());
String bsLink = "";
String bsLink = "";
bsLink = bsLink + "bitstream/"
+ item.getHandle() + "/"
+ bitstream.getSequenceID() + "/";
+ item.getHandle() + "/"
+ bitstream.getSequenceID() + "/";
try {
bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
} catch (UnsupportedEncodingException e) {
bsLink = bsLink + Util.encodeBitstreamName(bitstream.getName(), Constants.DEFAULT_ENCODING);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
e.printStackTrace();
}
signInfo.setLink(bsLink);
signInfo.setLink(bsLink);
return signInfo;
}
}
}
return signInfo;
}
}
}
public void setAvailableImageName(String availableImageName) {
this.availableImageName = availableImageName;
}
public void setAvailableImageName(String availableImageName) {
this.availableImageName = availableImageName;
}
public void setNonAvailableImageName(String nonAvailableImageName) {
this.nonAvailableImageName = nonAvailableImageName;
}
public void setNonAvailableImageName(String nonAvailableImageName) {
this.nonAvailableImageName = nonAvailableImageName;
}
}

View File

@@ -20,30 +20,31 @@ import org.dspace.core.Context;
* based on the collection the items belong to
*
* @author Kostas Stamatis
*
*/
public class ItemMarkingCollectionStrategy implements ItemMarkingExtractor {
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
public ItemMarkingCollectionStrategy() {
}
public ItemMarkingCollectionStrategy() {
}
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
if (mapping != null) {
for (Collection collection : item.getCollections()) {
if (mapping.containsKey(collection.getHandle())) {
return mapping.get(collection.getHandle());
}
}
}
if (mapping!=null){
for (Collection collection : item.getCollections()){
if (mapping.containsKey(collection.getHandle())){
return mapping.get(collection.getHandle());
}
}
}
return null;
}
return null;
}
public void setMapping(Map<String, ItemMarkingInfo> mapping) {
this.mapping = mapping;
}
public void setMapping(Map<String, ItemMarkingInfo> mapping) {
this.mapping = mapping;
}
}

View File

@@ -16,8 +16,9 @@ import org.dspace.core.Context;
* Interface to abstract the strategy for item signing
*
* @author Kostas Stamatis
*
*/
public interface ItemMarkingExtractor {
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException;
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException;
}

View File

@@ -11,46 +11,47 @@ package org.dspace.app.itemmarking;
* Simple DTO to transfer data about the marking info for an item
*
* @author Kostas Stamatis
*
*/
public class ItemMarkingInfo {
private String imageName;
private String classInfo;
private String tooltip;
private String link;
private String imageName;
private String classInfo;
private String tooltip;
private String link;
public ItemMarkingInfo() {
super();
}
public ItemMarkingInfo() {
super();
}
public String getImageName() {
return imageName;
}
public String getImageName() {
return imageName;
}
public void setImageName(String imageName) {
this.imageName = imageName;
}
public void setImageName(String imageName) {
this.imageName = imageName;
}
public String getTooltip() {
return tooltip;
}
public String getTooltip() {
return tooltip;
}
public void setTooltip(String tooltip) {
this.tooltip = tooltip;
}
public void setTooltip(String tooltip) {
this.tooltip = tooltip;
}
public String getLink() {
return link;
}
public String getLink() {
return link;
}
public void setLink(String link) {
this.link = link;
}
public void setLink(String link) {
this.link = link;
}
public String getClassInfo() {
return classInfo;
}
public String getClassInfo() {
return classInfo;
}
public void setClassInfo(String classInfo) {
this.classInfo = classInfo;
}
public void setClassInfo(String classInfo) {
this.classInfo = classInfo;
}
}

View File

@@ -24,41 +24,44 @@ import org.springframework.beans.factory.annotation.Autowired;
* metadata field
*
* @author Kostas Stamatis
*
*/
public class ItemMarkingMetadataStrategy implements ItemMarkingExtractor {
@Autowired(required = true)
protected ItemService itemService;
private String metadataField;
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
private String metadataField;
Map<String, ItemMarkingInfo> mapping = new HashMap<String, ItemMarkingInfo>();
public ItemMarkingMetadataStrategy() {
}
public ItemMarkingMetadataStrategy() {
}
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
@Override
public ItemMarkingInfo getItemMarkingInfo(Context context, Item item)
throws SQLException {
if (metadataField != null && mapping != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, metadataField);
if (vals.size() > 0) {
for (MetadataValue value : vals) {
String type = value.getValue();
if (mapping.containsKey(type)) {
return mapping.get(type);
}
}
}
}
return null;
}
if (metadataField != null && mapping!=null)
{
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, metadataField);
if (vals.size() > 0)
{
for (MetadataValue value : vals){
String type = value.getValue();
if (mapping.containsKey(type)){
return mapping.get(type);
}
}
}
}
return null;
}
public void setMetadataField(String metadataField) {
this.metadataField = metadataField;
}
public void setMetadataField(String metadataField) {
this.metadataField = metadataField;
}
public void setMapping(Map<String, ItemMarkingInfo> mapping) {
this.mapping = mapping;
}
public void setMapping(Map<String, ItemMarkingInfo> mapping) {
this.mapping = mapping;
}
}

View File

@@ -12,70 +12,80 @@ import java.util.LinkedHashMap;
import java.util.Map;
/**
* Container for UpdateActions
* Order of actions is very import for correct processing. This implementation
* supports an iterator that returns the actions in the order in which they are
* put in. Adding the same action a second time has no effect on this order.
* Container for UpdateActions
* Order of actions is very import for correct processing. This implementation
* supports an iterator that returns the actions in the order in which they are
* put in. Adding the same action a second time has no effect on this order.
*
*
*/
public class ActionManager implements Iterable<UpdateAction> {
protected Map<Class<? extends UpdateAction>, UpdateAction> registry
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
protected Map<Class<? extends UpdateAction>, UpdateAction> registry
= new LinkedHashMap<Class<? extends UpdateAction>, UpdateAction>();
/**
* Get update action
*
* @param actionClass UpdateAction class
* @return instantiation of UpdateAction class
* @throws InstantiationException if instantiation error
* @throws IllegalAccessException if illegal access error
*/
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
throws InstantiationException, IllegalAccessException {
UpdateAction action = registry.get(actionClass);
/**
* Get update action
* @param actionClass UpdateAction class
* @return instantiation of UpdateAction class
* @throws InstantiationException if instantiation error
* @throws IllegalAccessException if illegal access error
*/
public UpdateAction getUpdateAction(Class<? extends UpdateAction> actionClass)
throws InstantiationException, IllegalAccessException
{
UpdateAction action = registry.get(actionClass);
if (action == null) {
action = actionClass.newInstance();
registry.put(actionClass, action);
}
if (action == null)
{
action = actionClass.newInstance();
registry.put(actionClass, action);
}
return action;
}
return action;
}
/**
* @return whether any actions have been registered with this manager
*/
public boolean hasActions() {
return !registry.isEmpty();
}
/**
*
* @return whether any actions have been registered with this manager
*/
public boolean hasActions()
{
return !registry.isEmpty();
}
/**
* This implementation guarantees the iterator order is the same as the order
* in which updateActions have been added
*
* @return iterator for UpdateActions
*/
@Override
public Iterator<UpdateAction> iterator() {
return new Iterator<UpdateAction>() {
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
/**
* This implementation guarantees the iterator order is the same as the order
* in which updateActions have been added
*
* @return iterator for UpdateActions
*/
@Override
public Iterator<UpdateAction> iterator()
{
return new Iterator<UpdateAction>()
{
private Iterator<Class<? extends UpdateAction>> itr = registry.keySet().iterator();
@Override
public boolean hasNext() {
return itr.hasNext();
}
@Override
public boolean hasNext()
{
return itr.hasNext();
}
@Override
public UpdateAction next() {
return registry.get(itr.next());
}
@Override
public UpdateAction next()
{
return registry.get(itr.next());
}
//not supported
@Override
public void remove() {
throw new UnsupportedOperationException();
}
};
//not supported
@Override
public void remove()
{
throw new UnsupportedOperationException();
}
};
}
}
}

View File

@@ -19,11 +19,7 @@ import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.InstallItemService;
@@ -33,106 +29,116 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
/**
* Action to add bitstreams listed in item contents file to the item in DSpace
* Action to add bitstreams listed in item contents file to the item in DSpace
*
*
*/
public class AddBitstreamsAction extends UpdateBitstreamsAction {
protected AuthorizeService authorizeService = AuthorizeServiceFactory.getInstance().getAuthorizeService();
protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance()
.getBitstreamFormatService();
protected BitstreamFormatService bitstreamFormatService = ContentServiceFactory.getInstance().getBitstreamFormatService();
protected GroupService groupService = EPersonServiceFactory.getInstance().getGroupService();
protected InstallItemService installItemService = ContentServiceFactory.getInstance().getInstallItemService();
public AddBitstreamsAction() {
//empty
}
public AddBitstreamsAction()
{
//empty
}
/**
* Adds bitstreams from the archive as listed in the contents file.
*
* @param context DSpace Context
* @param itarch Item Archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
/**
* Adds bitstreams from the archive as listed in the contents file.
*
* @param context DSpace Context
* @param itarch Item Archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException,
ParseException, IOException, AuthorizeException, SQLException {
Item item = itarch.getItem();
File dir = itarch.getDirectory();
boolean suppressUndo) throws IllegalArgumentException,
ParseException, IOException, AuthorizeException, SQLException
{
Item item = itarch.getItem();
File dir = itarch.getDirectory();
List<ContentsEntry> contents = MetadataUtilities.readContentsFile(new File(dir, ItemUpdate.CONTENTS_FILE));
List<ContentsEntry> contents = MetadataUtilities.readContentsFile(new File(dir, ItemUpdate.CONTENTS_FILE));
if (contents.isEmpty()) {
ItemUpdate.pr("Contents is empty - no bitstreams to add");
return;
}
if (contents.isEmpty())
{
ItemUpdate.pr("Contents is empty - no bitstreams to add");
return;
}
ItemUpdate.pr("Contents bitstream count: " + contents.size());
ItemUpdate.pr("Contents bitstream count: " + contents.size());
String[] files = dir.list(ItemUpdate.fileFilter);
List<String> fileList = new ArrayList<String>();
for (String filename : files) {
fileList.add(filename);
ItemUpdate.pr("file: " + filename);
}
String[] files = dir.list(ItemUpdate.fileFilter);
List<String> fileList = new ArrayList<String>();
for (String filename : files)
{
fileList.add(filename);
ItemUpdate.pr("file: " + filename);
}
for (ContentsEntry ce : contents) {
//validate match to existing file in archive
if (!fileList.contains(ce.filename)) {
throw new IllegalArgumentException("File listed in contents is missing: " + ce.filename);
}
}
int bitstream_bundles_updated = 0;
for (ContentsEntry ce : contents)
{
//validate match to existing file in archive
if (!fileList.contains(ce.filename))
{
throw new IllegalArgumentException("File listed in contents is missing: " + ce.filename);
}
}
int bitstream_bundles_updated = 0;
//now okay to add
for (ContentsEntry ce : contents) {
String targetBundleName = addBitstream(context, itarch, item, dir, ce, suppressUndo, isTest);
if (!targetBundleName.equals("")
&& !targetBundleName.equals("THUMBNAIL")
&& !targetBundleName.equals("TEXT")) {
bitstream_bundles_updated++;
}
}
//now okay to add
for (ContentsEntry ce : contents)
{
String targetBundleName = addBitstream(context, itarch, item, dir, ce, suppressUndo, isTest);
if (!targetBundleName.equals("")
&& !targetBundleName.equals("THUMBNAIL")
&& !targetBundleName.equals("TEXT"))
{
bitstream_bundles_updated++;
}
}
if (alterProvenance && bitstream_bundles_updated > 0) {
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
if (alterProvenance && bitstream_bundles_updated > 0)
{
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
String append = ". Added " + Integer.toString(bitstream_bundles_updated)
+ " bitstream(s) on " + DCDate.getCurrent() + " : "
+ installItemService.getBitstreamProvenanceMessage(context, item);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
String append = ". Added " + Integer.toString(bitstream_bundles_updated)
+ " bitstream(s) on " + DCDate.getCurrent() + " : "
+ installItemService.getBitstreamProvenanceMessage(context, item);
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
/**
* Add bitstream
*
* @param context DSpace Context
* @param itarch Item Archive
* @param item DSpace Item
* @param dir directory
* @param ce contents entry for bitstream
* @param suppressUndo undo flag
* @param isTest test flag
* @return bundle name
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
protected String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
ContentsEntry ce, boolean suppressUndo, boolean isTest)
throws IOException, IllegalArgumentException, SQLException, AuthorizeException, ParseException {
ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
File f = new File(dir, ce.filename);
/**
* Add bitstream
* @param context DSpace Context
* @param itarch Item Archive
* @param item DSpace Item
* @param dir directory
* @param ce contents entry for bitstream
* @param suppressUndo undo flag
* @param isTest test flag
* @return bundle name
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
protected String addBitstream(Context context, ItemArchive itarch, Item item, File dir,
ContentsEntry ce, boolean suppressUndo, boolean isTest)
throws IOException, IllegalArgumentException, SQLException, AuthorizeException, ParseException
{
ItemUpdate.pr("contents entry for bitstream: " + ce.toString());
File f = new File(dir, ce.filename);
// get an input stream
BufferedInputStream bis = new BufferedInputStream(new FileInputStream(f));
@@ -140,69 +146,84 @@ public class AddBitstreamsAction extends UpdateBitstreamsAction {
Bitstream bs = null;
String newBundleName = ce.bundlename;
if (ce.bundlename == null) { // should be required but default convention established
if (ce.filename.equals("license.txt")) {
if (ce.bundlename == null) // should be required but default convention established
{
if (ce.filename.equals("license.txt"))
{
newBundleName = "LICENSE";
} else {
}
else
{
newBundleName = "ORIGINAL";
}
}
ItemUpdate.pr(" Bitstream " + ce.filename + " to be added to bundle: " + newBundleName);
if (!isTest) {
// find the bundle
List<Bundle> bundles = itemService.getBundles(item, newBundleName);
Bundle targetBundle = null;
if (!isTest)
{
// find the bundle
List<Bundle> bundles = itemService.getBundles(item, newBundleName);
Bundle targetBundle = null;
if (bundles.size() < 1) {
// not found, create a new one
targetBundle = bundleService.create(context, item, newBundleName);
} else {
//verify bundle + name are not duplicates
for (Bundle b : bundles) {
List<Bitstream> bitstreams = b.getBitstreams();
for (Bitstream bsm : bitstreams) {
if (bsm.getName().equals(ce.filename)) {
throw new IllegalArgumentException("Duplicate bundle + filename cannot be added: "
+ b.getName() + " + " + bsm.getName());
}
}
}
if (bundles.size() < 1)
{
// not found, create a new one
targetBundle = bundleService.create(context, item, newBundleName);
}
else
{
//verify bundle + name are not duplicates
for (Bundle b : bundles)
{
List<Bitstream> bitstreams = b.getBitstreams();
for (Bitstream bsm : bitstreams)
{
if (bsm.getName().equals(ce.filename))
{
throw new IllegalArgumentException("Duplicate bundle + filename cannot be added: "
+ b.getName() + " + " + bsm.getName());
}
}
}
// select first bundle
targetBundle = bundles.iterator().next();
}
// select first bundle
targetBundle = bundles.iterator().next();
}
bs = bitstreamService.create(context, targetBundle, bis);
bs.setName(context, ce.filename);
bs = bitstreamService.create(context, targetBundle, bis);
bs.setName(context, ce.filename);
// Identify the format
// FIXME - guessing format guesses license.txt incorrectly as a text file format!
BitstreamFormat fmt = bitstreamFormatService.guessFormat(context, bs);
bitstreamService.setFormat(context, bs, fmt);
// Identify the format
// FIXME - guessing format guesses license.txt incorrectly as a text file format!
BitstreamFormat fmt = bitstreamFormatService.guessFormat(context, bs);
bitstreamService.setFormat(context, bs, fmt);
if (ce.description != null) {
bs.setDescription(context, ce.description);
}
if (ce.description != null)
{
bs.setDescription(context, ce.description);
}
if ((ce.permissionsActionId != -1) && (ce.permissionsGroupName != null)) {
Group group = groupService.findByName(context, ce.permissionsGroupName);
if ((ce.permissionsActionId != -1) && (ce.permissionsGroupName != null))
{
Group group = groupService.findByName(context, ce.permissionsGroupName);
if (group != null) {
if (group != null)
{
authorizeService.removeAllPolicies(context, bs); // remove the default policy
authorizeService.createResourcePolicy(context, bs, group, null, ce.permissionsActionId, null);
}
}
}
}
//update after all changes are applied
//update after all changes are applied
bitstreamService.update(context, bs);
if (!suppressUndo) {
itarch.addUndoDeleteContents(bs.getID());
}
return targetBundle.getName();
if (!suppressUndo)
{
itarch.addUndoDeleteContents(bs.getID());
}
return targetBundle.getName();
}
return "";
return "";
}
}

View File

@@ -11,107 +11,119 @@ import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.MetadataFieldService;
import org.dspace.content.service.MetadataSchemaService;
import org.dspace.core.Context;
/**
* Action to add metadata to item
* Action to add metadata to item
*
*/
public class AddMetadataAction extends UpdateMetadataAction {
protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance()
.getMetadataSchemaService();
protected MetadataSchemaService metadataSchemaService = ContentServiceFactory.getInstance().getMetadataSchemaService();
protected MetadataFieldService metadataFieldService = ContentServiceFactory.getInstance().getMetadataFieldService();
/**
* Adds metadata specified in the source archive
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws AuthorizeException if authorization error
* @throws SQLException if database error
*/
@Override
/**
* Adds metadata specified in the source archive
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws AuthorizeException if authorization error
* @throws SQLException if database error
*/
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, SQLException {
Item item = itarch.getItem();
String dirname = itarch.getDirectoryName();
boolean suppressUndo) throws AuthorizeException, SQLException
{
Item item = itarch.getItem();
String dirname = itarch.getDirectoryName();
for (DtoMetadata dtom : itarch.getMetadataFields()) {
for (String f : targetFields) {
if (dtom.matches(f, false)) {
// match against metadata for this field/value in repository
// qualifier must be strictly matched, possibly null
List<MetadataValue> ardcv = null;
ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
for (DtoMetadata dtom : itarch.getMetadataFields())
{
for (String f : targetFields)
{
if (dtom.matches(f, false))
{
// match against metadata for this field/value in repository
// qualifier must be strictly matched, possibly null
List<MetadataValue> ardcv = null;
ardcv = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
boolean found = false;
for (MetadataValue dcv : ardcv) {
if (dcv.getValue().equals(dtom.value)) {
found = true;
break;
}
}
boolean found = false;
for (MetadataValue dcv : ardcv)
{
if (dcv.getValue().equals(dtom.value))
{
found = true;
break;
}
}
if (found) {
ItemUpdate.pr("Warning: No new metadata found to add to item " + dirname
+ " for element " + f);
} else {
if (isTest) {
ItemUpdate.pr("Metadata to add: " + dtom.toString());
//validity tests that would occur in actual processing
// If we're just test the import, let's check that the actual metadata field exists.
MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
if (found)
{
ItemUpdate.pr("Warning: No new metadata found to add to item " + dirname
+ " for element " + f);
}
else
{
if (isTest)
{
ItemUpdate.pr("Metadata to add: " + dtom.toString());
//validity tests that would occur in actual processing
// If we're just test the import, let's check that the actual metadata field exists.
MetadataSchema foundSchema = metadataSchemaService.find(context, dtom.schema);
if (foundSchema == null) {
ItemUpdate.pr("ERROR: schema '"
+ dtom.schema + "' was not found in the registry; found on item " +
dirname);
} else {
MetadataField foundField = metadataFieldService
.findByElement(context, foundSchema, dtom.element, dtom.qualifier);
if (foundSchema == null)
{
ItemUpdate.pr("ERROR: schema '"
+ dtom.schema + "' was not found in the registry; found on item " + dirname);
}
else
{
MetadataField foundField = metadataFieldService.findByElement(context, foundSchema, dtom.element, dtom.qualifier);
if (foundField == null) {
ItemUpdate.pr("ERROR: Metadata field: '" + dtom.schema + "." + dtom.element + "."
+ dtom.qualifier + "' not found in registry; found on item " +
dirname);
}
}
} else {
itemService
.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language,
dtom.value);
ItemUpdate.pr("Metadata added: " + dtom.toString());
if (foundField == null)
{
ItemUpdate.pr("ERROR: Metadata field: '" + dtom.schema + "." + dtom.element + "."
+ dtom.qualifier + "' not found in registry; found on item " + dirname);
}
}
}
else
{
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, dtom.value);
ItemUpdate.pr("Metadata added: " + dtom.toString());
if (!suppressUndo) {
//itarch.addUndoDtom(dtom);
//ItemUpdate.pr("Undo metadata: " + dtom);
if (!suppressUndo)
{
//itarch.addUndoDtom(dtom);
//ItemUpdate.pr("Undo metadata: " + dtom);
// add all as a replace record to be preceded by delete
for (MetadataValue dcval : ardcv) {
// add all as a replace record to be preceded by delete
for (MetadataValue dcval : ardcv)
{
MetadataField metadataField = dcval.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(
DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcval.getLanguage(),
dcval.getValue()));
}
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcval.getLanguage(), dcval.getValue()));
}
}
}
}
break; // don't need to check if this field matches any other target fields
}
}
}
}
}
}
}
break; // don't need to check if this field matches any other target fields
}
}
}
}
}

View File

@@ -7,49 +7,55 @@
*/
package org.dspace.app.itemupdate;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.io.InputStream;
import java.io.FileInputStream;
import org.dspace.content.Bitstream;
/**
* Filter interface to be used by ItemUpdate
* to determine which bitstreams in an Item
* acceptable for removal.
* Filter interface to be used by ItemUpdate
* to determine which bitstreams in an Item
* acceptable for removal.
*
*/
public abstract class BitstreamFilter {
protected Properties props = null;
protected Properties props = null;
/**
* The filter method
*
* @param bitstream Bitstream
* @return whether the bitstream matches the criteria
* @throws BitstreamFilterException if filter error
*/
public abstract boolean accept(Bitstream bitstream) throws BitstreamFilterException;
/**
* The filter method
*
* @param bitstream Bitstream
* @return whether the bitstream matches the criteria
* @throws BitstreamFilterException if filter error
*/
public abstract boolean accept(Bitstream bitstream) throws BitstreamFilterException;
/**
* @param filepath - The complete path for the properties file
* @throws IOException if IO error
*/
public void initProperties(String filepath)
throws IOException {
props = new Properties();
/**
*
* @param filepath - The complete path for the properties file
* @throws IOException if IO error
*/
public void initProperties(String filepath)
throws IOException
{
props = new Properties();
InputStream in = null;
InputStream in = null;
try {
try
{
in = new FileInputStream(filepath);
props.load(in);
} finally {
if (in != null) {
}
finally
{
if (in != null)
{
in.close();
}
}
}
}
}

View File

@@ -14,44 +14,55 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
/**
* BitstreamFilter implementation to filter by bundle name
* BitstreamFilter implementation to filter by bundle name
*
*/
public class BitstreamFilterByBundleName extends BitstreamFilter {
protected String bundleName;
protected String bundleName;
public BitstreamFilterByBundleName() {
//empty
}
public BitstreamFilterByBundleName()
{
//empty
}
/**
* Filter bitstream based on bundle name found in properties file
*
* @param bitstream Bitstream
* @return whether bitstream is in bundle
* @throws BitstreamFilterException if filter error
*/
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException {
if (bundleName == null) {
bundleName = props.getProperty("bundle");
if (bundleName == null) {
throw new BitstreamFilterException("Property 'bundle' not found.");
}
}
/**
* Filter bitstream based on bundle name found in properties file
*
* @param bitstream Bitstream
* @throws BitstreamFilterException if filter error
* @return whether bitstream is in bundle
*
*/
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException
{
if (bundleName == null)
{
bundleName = props.getProperty("bundle");
if (bundleName == null)
{
throw new BitstreamFilterException("Property 'bundle' not found.");
}
}
try {
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles) {
if (b.getName().equals(bundleName)) {
return true;
}
}
} catch (SQLException e) {
throw new BitstreamFilterException(e);
}
return false;
}
try
{
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles)
{
if (b.getName().equals(bundleName))
{
return true;
}
}
}
catch(SQLException e)
{
throw new BitstreamFilterException(e);
}
return false;
}
}

View File

@@ -7,43 +7,47 @@
*/
package org.dspace.app.itemupdate;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.*;
import org.dspace.content.Bitstream;
/**
* BitstreamFilter implementation to filter by filename pattern
*
*/
public class BitstreamFilterByFilename extends BitstreamFilter {
protected Pattern pattern;
protected Pattern pattern;
protected String filenameRegex;
public BitstreamFilterByFilename() {
//empty
}
public BitstreamFilterByFilename()
{
//empty
}
/**
* Tests bitstream by matching the regular expression in the
* properties against the bitstream name
*
* @param bitstream Bitstream
* @return whether bitstream name matches the regular expression
* @throws BitstreamFilterException if filter error
*/
@Override
public boolean accept(Bitstream bitstream) throws BitstreamFilterException {
if (filenameRegex == null) {
filenameRegex = props.getProperty("filename");
if (filenameRegex == null) {
throw new BitstreamFilterException("BitstreamFilter property 'filename' not found.");
}
pattern = Pattern.compile(filenameRegex);
}
/**
* Tests bitstream by matching the regular expression in the
* properties against the bitstream name
*
* @param bitstream Bitstream
* @return whether bitstream name matches the regular expression
* @exception BitstreamFilterException if filter error
*/
@Override
public boolean accept(Bitstream bitstream) throws BitstreamFilterException
{
if (filenameRegex == null)
{
filenameRegex = props.getProperty("filename");
if (filenameRegex == null)
{
throw new BitstreamFilterException("BitstreamFilter property 'filename' not found.");
}
pattern = Pattern.compile(filenameRegex);
}
Matcher m = pattern.matcher(bitstream.getName());
return m.matches();
}
Matcher m = pattern.matcher(bitstream.getName());
return m.matches();
}
}

View File

@@ -8,27 +8,30 @@
package org.dspace.app.itemupdate;
/**
* Exception class for BitstreamFilters
* Exception class for BitstreamFilters
*
*/
public class BitstreamFilterException extends Exception {
public class BitstreamFilterException extends Exception
{
private static final long serialVersionUID = 1L;
private static final long serialVersionUID = 1L;
public BitstreamFilterException() {
}
/**
* @param msg exception message
*/
public BitstreamFilterException(String msg) {
super(msg);
}
/**
* @param e exception
*/
public BitstreamFilterException(Exception e) {
super(e);
}
public BitstreamFilterException() {}
/**
*
* @param msg exception message
*/
public BitstreamFilterException(String msg)
{
super(msg);
}
/**
*
* @param e exception
*/
public BitstreamFilterException(Exception e)
{
super(e);
}
}

View File

@@ -8,124 +8,148 @@
package org.dspace.app.itemupdate;
import java.text.ParseException;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.*;
import org.dspace.core.Constants;
/**
* Holds the elements of a line in the Contents Entry file
* Holds the elements of a line in the Contents Entry file
*
* Based on private methods in ItemImport
* Based on private methods in ItemImport
*
* Lacking a spec or full documentation for the file format,
* it looks from the source code that the ordering or elements is not fixed
*
* e.g.:
* {@code
* 48217870-MIT.pdf\tbundle: bundlename\tpermissions: -r 'MIT Users'\tdescription: Full printable version (MIT only)
* permissions: -[r|w] ['group name']
* description: <the description of the file>
* }
*
* Lacking a spec or full documentation for the file format,
* it looks from the source code that the ordering or elements is not fixed
*
* e.g.:
* {@code
* 48217870-MIT.pdf\tbundle: bundlename\tpermissions: -r 'MIT Users'\tdescription: Full printable version (MIT only)
* permissions: -[r|w] ['group name']
* description: <the description of the file>
* }
*/
public class ContentsEntry {
public static final String HDR_BUNDLE = "bundle:";
public static final String HDR_PERMISSIONS = "permissions:";
public static final String HDR_DESCRIPTION = "description:";
public class ContentsEntry
{
public static final String HDR_BUNDLE = "bundle:";
public static final String HDR_PERMISSIONS = "permissions:";
public static final String HDR_DESCRIPTION = "description:";
public static final Pattern permissionsPattern = Pattern.compile("-([rw])\\s*'?([^']+)'?");
public static final Pattern permissionsPattern = Pattern.compile("-([rw])\\s*'?([^']+)'?");
final String filename;
final String bundlename;
final String permissionsGroupName;
final int permissionsActionId;
final String description;
final String filename;
final String bundlename;
final String permissionsGroupName;
final int permissionsActionId;
final String description;
protected ContentsEntry(String filename,
String bundlename,
int permissionsActionId,
String permissionsGroupName,
String description) {
this.filename = filename;
this.bundlename = bundlename;
this.permissionsActionId = permissionsActionId;
this.permissionsGroupName = permissionsGroupName;
this.description = description;
}
protected ContentsEntry(String filename,
String bundlename,
int permissionsActionId,
String permissionsGroupName,
String description)
{
this.filename = filename;
this.bundlename = bundlename;
this.permissionsActionId = permissionsActionId;
this.permissionsGroupName = permissionsGroupName;
this.description = description;
}
/**
* Factory method parses a line from the Contents Entry file
*
* @param line line as string
* @return the parsed ContentsEntry object
* @throws ParseException if parse error
*/
public static ContentsEntry parse(String line)
throws ParseException {
String[] ar = line.split("\t");
ItemUpdate.pr("ce line split: " + ar.length);
/**
* Factory method parses a line from the Contents Entry file
*
* @param line line as string
* @return the parsed ContentsEntry object
* @throws ParseException if parse error
*/
public static ContentsEntry parse(String line)
throws ParseException
{
String[] ar = line.split("\t");
ItemUpdate.pr("ce line split: " + ar.length);
String[] arp = new String[4];
arp[0] = ar[0]; //bitstream name doesn't have header and is always first
String[] arp = new String[4];
arp[0] = ar[0]; //bitstream name doesn't have header and is always first
String groupName = null;
int actionId = -1;
String groupName = null;
int actionId = -1;
if (ar.length > 1) {
for (int i = 1; i < ar.length; i++) {
ItemUpdate.pr("ce " + i + " : " + ar[i]);
if (ar[i].startsWith(HDR_BUNDLE)) {
arp[1] = ar[i].substring(HDR_BUNDLE.length()).trim();
if (ar.length > 1)
{
for (int i=1; i < ar.length; i++)
{
ItemUpdate.pr("ce " + i + " : " + ar[i]);
if (ar[i].startsWith(HDR_BUNDLE))
{
arp[1] = ar[i].substring(HDR_BUNDLE.length()).trim();
} else if (ar[i].startsWith(HDR_PERMISSIONS)) {
arp[2] = ar[i].substring(HDR_PERMISSIONS.length()).trim();
}
else if (ar[i].startsWith(HDR_PERMISSIONS))
{
arp[2] = ar[i].substring(HDR_PERMISSIONS.length()).trim();
// parse into actionId and group name
// parse into actionId and group name
Matcher m = permissionsPattern.matcher(arp[2]);
if (m.matches()) {
String action = m.group(1); //
if (action.equals("r")) {
actionId = Constants.READ;
} else if (action.equals("w")) {
actionId = Constants.WRITE;
}
Matcher m = permissionsPattern.matcher(arp[2]);
if (m.matches())
{
String action = m.group(1); //
if (action.equals("r"))
{
actionId = Constants.READ;
}
else if (action.equals("w"))
{
actionId = Constants.WRITE;
}
groupName = m.group(2).trim();
}
groupName = m.group(2).trim();
}
} else if (ar[i].startsWith(HDR_DESCRIPTION)) {
arp[3] = ar[i].substring(HDR_DESCRIPTION.length()).trim();
}
else if (ar[i].startsWith(HDR_DESCRIPTION))
{
arp[3] = ar[i].substring(HDR_DESCRIPTION.length()).trim();
} else {
throw new ParseException("Unknown text in contents file: " + ar[i], 0);
}
}
}
return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]);
}
}
else
{
throw new ParseException("Unknown text in contents file: " + ar[i], 0);
}
}
}
return new ContentsEntry(arp[0], arp[1], actionId, groupName, arp[3]);
}
public String toString() {
StringBuilder sb = new StringBuilder(filename);
if (bundlename != null) {
sb.append(HDR_BUNDLE).append(" ").append(bundlename);
}
public String toString()
{
StringBuilder sb = new StringBuilder(filename);
if (bundlename != null)
{
sb.append(HDR_BUNDLE).append(" ").append(bundlename);
}
if (permissionsGroupName != null) {
sb.append(HDR_PERMISSIONS);
if (permissionsActionId == Constants.READ) {
sb.append(" -r ");
} else if (permissionsActionId == Constants.WRITE) {
sb.append(" -w ");
}
sb.append(permissionsGroupName);
}
if (permissionsGroupName != null)
{
sb.append(HDR_PERMISSIONS);
if (permissionsActionId == Constants.READ)
{
sb.append(" -r ");
}
else if (permissionsActionId == Constants.WRITE)
{
sb.append(" -w ");
}
sb.append(permissionsGroupName);
}
if (description != null) {
sb.append(HDR_DESCRIPTION).append(" ").append(description);
}
if (description != null)
{
sb.append(HDR_DESCRIPTION).append(" ").append(description);
}
return sb.toString();
}
return sb.toString();
}
}

View File

@@ -14,81 +14,99 @@ import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
* Action to delete bitstreams
* Action to delete bitstreams
*
* Undo not supported for this UpdateAction
* Undo not supported for this UpdateAction
*
* Derivatives of the bitstream to be deleted are not also deleted
*
* Derivatives of the bitstream to be deleted are not also deleted
*/
public class DeleteBitstreamsAction extends UpdateBitstreamsAction {
/**
* Delete bitstream from item
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
public class DeleteBitstreamsAction extends UpdateBitstreamsAction
{
/**
* Delete bitstream from item
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws IllegalArgumentException if arg exception
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws IllegalArgumentException, IOException,
SQLException, AuthorizeException, ParseException {
File f = new File(itarch.getDirectory(), ItemUpdate.DELETE_CONTENTS_FILE);
if (!f.exists()) {
ItemUpdate.pr("Warning: Delete_contents file for item " + itarch.getDirectoryName() + " not found.");
} else {
List<String> list = MetadataUtilities.readDeleteContentsFile(f);
if (list.isEmpty()) {
ItemUpdate.pr("Warning: empty delete_contents file for item " + itarch.getDirectoryName());
} else {
for (String id : list) {
try {
Bitstream bs = bitstreamService.findByIdOrLegacyId(context, id);
if (bs == null) {
ItemUpdate.pr("Bitstream not found by id: " + id);
} else {
List<Bundle> bundles = bs.getBundles();
for (Bundle b : bundles) {
if (isTest) {
ItemUpdate.pr("Delete bitstream with id = " + id);
} else {
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted bitstream with id = " + id);
boolean suppressUndo) throws IllegalArgumentException, IOException,
SQLException, AuthorizeException, ParseException
{
File f = new File(itarch.getDirectory(), ItemUpdate.DELETE_CONTENTS_FILE);
if (!f.exists())
{
ItemUpdate.pr("Warning: Delete_contents file for item " + itarch.getDirectoryName() + " not found.");
}
else
{
List<String> list = MetadataUtilities.readDeleteContentsFile(f);
if (list.isEmpty())
{
ItemUpdate.pr("Warning: empty delete_contents file for item " + itarch.getDirectoryName() );
}
else
{
for (String id : list)
{
try
{
Bitstream bs = bitstreamService.findByIdOrLegacyId(context, id);
if (bs == null)
{
ItemUpdate.pr("Bitstream not found by id: " + id);
}
else
{
List<Bundle> bundles = bs.getBundles();
for (Bundle b : bundles)
{
if (isTest)
{
ItemUpdate.pr("Delete bitstream with id = " + id);
}
else
{
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted bitstream with id = " + id);
}
}
}
}
if (alterProvenance) {
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
if (alterProvenance)
{
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
String append = "Bitstream " + bs.getName() + " deleted on " + DCDate
.getCurrent() + "; ";
Item item = bundles.iterator().next().getItems().iterator().next();
ItemUpdate.pr("Append provenance with: " + append);
String append = "Bitstream " + bs.getName() + " deleted on " + DCDate.getCurrent() + "; ";
Item item = bundles.iterator().next().getItems().iterator().next();
ItemUpdate.pr("Append provenance with: " + append);
if (!isTest) {
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
}
} catch (SQLException e) {
ItemUpdate.pr("Error finding bitstream from id: " + id + " : " + e.toString());
}
}
}
}
}
if (!isTest)
{
MetadataUtilities.appendMetadata(context, item, dtom, false, append);
}
}
}
}
catch(SQLException e)
{
ItemUpdate.pr("Error finding bitstream from id: " + id + " : " + e.toString());
}
}
}
}
}
}

View File

@@ -14,104 +14,115 @@ import java.util.ArrayList;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.*;
import org.dspace.core.Context;
/**
* Action to delete bitstreams using a specified filter implementing BitstreamFilter
* Derivatives for the target bitstreams are not deleted.
* Action to delete bitstreams using a specified filter implementing BitstreamFilter
* Derivatives for the target bitstreams are not deleted.
*
* The dc.description.provenance field is amended to reflect the deletions
*
* Note: Multiple filters are impractical if trying to manage multiple properties files
* in a commandline environment
*
* The dc.description.provenance field is amended to reflect the deletions
*
* Note: Multiple filters are impractical if trying to manage multiple properties files
* in a commandline environment
*/
public class DeleteBitstreamsByFilterAction extends UpdateBitstreamsAction {
protected BitstreamFilter filter;
protected BitstreamFilter filter;
/**
* Set filter
*
* @param filter BitstreamFilter
*/
public void setBitstreamFilter(BitstreamFilter filter) {
this.filter = filter;
}
/**
* Set filter
*
* @param filter BitstreamFilter
*/
public void setBitstreamFilter(BitstreamFilter filter)
{
this.filter = filter;
}
/**
* Get filter
*
* @return filter
*/
public BitstreamFilter getBitstreamFilter() {
return filter;
}
/**
* Get filter
* @return filter
*/
public BitstreamFilter getBitstreamFilter()
{
return filter;
}
/**
* Delete bitstream
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
* @throws BitstreamFilterException if filter error
*/
@Override
/**
* Delete bitstream
*
* @param context DSpace Context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws IOException if IO error
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
* @throws BitstreamFilterException if filter error
*/
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException,
BitstreamFilterException, IOException, ParseException, SQLException {
boolean suppressUndo) throws AuthorizeException,
BitstreamFilterException, IOException, ParseException, SQLException
{
List<String> deleted = new ArrayList<String>();
List<String> deleted = new ArrayList<String>();
Item item = itarch.getItem();
List<Bundle> bundles = item.getBundles();
Item item = itarch.getItem();
List<Bundle> bundles = item.getBundles();
for (Bundle b : bundles) {
List<Bitstream> bitstreams = b.getBitstreams();
String bundleName = b.getName();
for (Bundle b : bundles)
{
List<Bitstream> bitstreams = b.getBitstreams();
String bundleName = b.getName();
for (Bitstream bs : bitstreams) {
if (filter.accept(bs)) {
if (isTest) {
ItemUpdate.pr("Delete from bundle " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
} else {
//provenance is not maintained for derivative bitstreams
if (!bundleName.equals("THUMBNAIL") && !bundleName.equals("TEXT")) {
deleted.add(bs.getName());
}
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
}
}
}
for (Bitstream bs : bitstreams)
{
if (filter.accept(bs))
{
if (isTest)
{
ItemUpdate.pr("Delete from bundle " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
}
else
{
//provenance is not maintained for derivative bitstreams
if (!bundleName.equals("THUMBNAIL") && !bundleName.equals("TEXT"))
{
deleted.add(bs.getName());
}
bundleService.removeBitstream(context, b, bs);
ItemUpdate.pr("Deleted " + bundleName + " bitstream " + bs.getName()
+ " with id = " + bs.getID());
}
}
}
}
if (alterProvenance && !deleted.isEmpty())
{
StringBuilder sb = new StringBuilder(" Bitstreams deleted on ");
sb.append(DCDate.getCurrent()).append(": ");
for (String s : deleted)
{
sb.append(s).append(", ");
}
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
ItemUpdate.pr("Append provenance with: " + sb.toString());
if (!isTest)
{
MetadataUtilities.appendMetadata(context, item, dtom, false, sb.toString());
}
}
if (alterProvenance && !deleted.isEmpty()) {
StringBuilder sb = new StringBuilder(" Bitstreams deleted on ");
sb.append(DCDate.getCurrent()).append(": ");
for (String s : deleted) {
sb.append(s).append(", ");
}
DtoMetadata dtom = DtoMetadata.create("dc.description.provenance", "en", "");
ItemUpdate.pr("Append provenance with: " + sb.toString());
if (!isTest) {
MetadataUtilities.appendMetadata(context, item, dtom, false, sb.toString());
}
}
}
}
}

View File

@@ -12,54 +12,60 @@ import java.text.ParseException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.core.Context;
/**
* Action to delete metadata
* Action to delete metadata
*
*
*/
public class DeleteMetadataAction extends UpdateMetadataAction {
/**
* Delete metadata from item
*
* @param context DSpace Context
* @param itarch Item Archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
/**
* Delete metadata from item
*
* @param context DSpace Context
* @param itarch Item Archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws SQLException if database error
* @throws AuthorizeException if authorization error
* @throws ParseException if parse error
*/
@Override
public void execute(Context context, ItemArchive itarch, boolean isTest,
boolean suppressUndo) throws AuthorizeException, ParseException, SQLException {
Item item = itarch.getItem();
for (String f : targetFields) {
DtoMetadata dummy = DtoMetadata.create(f, Item.ANY, "");
List<MetadataValue> ardcv = itemService.getMetadataByMetadataString(item, f);
boolean suppressUndo) throws AuthorizeException, ParseException, SQLException {
Item item = itarch.getItem();
for (String f : targetFields)
{
DtoMetadata dummy = DtoMetadata.create(f, Item.ANY, "");
List<MetadataValue> ardcv = itemService.getMetadataByMetadataString(item, f);
ItemUpdate.pr("Metadata to be deleted: ");
for (MetadataValue dcv : ardcv) {
ItemUpdate.pr(" " + MetadataUtilities.getDCValueString(dcv));
}
ItemUpdate.pr("Metadata to be deleted: ");
for (MetadataValue dcv : ardcv)
{
ItemUpdate.pr(" " + MetadataUtilities.getDCValueString(dcv));
}
if (!isTest) {
if (!suppressUndo) {
for (MetadataValue dcv : ardcv) {
if (!isTest)
{
if (!suppressUndo)
{
for (MetadataValue dcv : ardcv)
{
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
itarch.addUndoMetadataField(
DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcv.getLanguage(), dcv.getValue()));
}
}
itarch.addUndoMetadataField(DtoMetadata.create(metadataSchema.getName(), metadataField.getElement(),
metadataField.getQualifier(), dcv.getLanguage(), dcv.getValue()));
}
}
itemService.clearMetadata(context, item, dummy.schema, dummy.element, dummy.qualifier, Item.ANY);
}
}
}
}
}
}
}

View File

@@ -10,13 +10,15 @@ package org.dspace.app.itemupdate;
import java.util.Properties;
/**
* Bitstream filter to delete from TEXT bundle
* Bitstream filter to delete from TEXT bundle
*
*/
public class DerivativeTextBitstreamFilter extends BitstreamFilterByBundleName {
public DerivativeTextBitstreamFilter() {
props = new Properties();
props.setProperty("bundle", "TEXT");
}
public DerivativeTextBitstreamFilter()
{
props = new Properties();
props.setProperty("bundle", "TEXT");
}
}

View File

@@ -8,131 +8,152 @@
package org.dspace.app.itemupdate;
import java.text.ParseException;
import org.dspace.content.Item;
/**
* A data transfer object class enhancement of org.dspace.content.DCValue, which is deprecated
* Name intended to not conflict with DSpace API classes for similar concepts but not usable in this context
* A data transfer object class enhancement of org.dspace.content.DCValue, which is deprecated
* Name intended to not conflict with DSpace API classes for similar concepts but not usable in this context
*
* Adds some utility methods
* Adds some utility methods
*
* Really not at all general enough but supports Dublin Core and the compound form notation {@code <schema>.<element>[.<qualifier>]}
*
* Does not support wildcard for qualifier
*
* Really not at all general enough but supports Dublin Core and the compound form notation {@code <schema>
* .<element>[.<qualifier>]}
*
* Does not support wildcard for qualifier
*/
class DtoMetadata {
final String schema;
final String element;
final String qualifier;
final String language;
final String value;
class DtoMetadata
{
final String schema;
final String element;
final String qualifier;
final String language;
final String value;
protected DtoMetadata(String schema, String element, String qualifier, String language, String value) {
this.schema = schema;
this.element = element;
this.qualifier = qualifier;
this.language = language;
this.value = value;
protected DtoMetadata(String schema, String element, String qualifier, String language, String value)
{
this.schema = schema;
this.element = element;
this.qualifier = qualifier;
this.language = language;
this.value = value;
}
/**
* Factory method
*
*
* @param schema not null, not empty - 'dc' is the standard case
* @param element not null, not empty
* @param qualifier null; don't allow empty string or * indicating 'any'
* @param language null or empty
* @param value value
* @return DtoMetadata object
* @throws IllegalArgumentException if arg error
*/
public static DtoMetadata create(String schema,
String element,
String qualifier,
String language,
String value)
throws IllegalArgumentException
{
if ((qualifier != null) && (qualifier.equals(Item.ANY) || qualifier.equals("")))
{
throw new IllegalArgumentException("Invalid qualifier: " + qualifier);
}
return new DtoMetadata(schema, element, qualifier, language, value);
}
/**
* Factory method
*
* @param schema not null, not empty - 'dc' is the standard case
* @param element not null, not empty
* @param qualifier null; don't allow empty string or * indicating 'any'
* @param language null or empty
* @param value value
* @return DtoMetadata object
* @throws IllegalArgumentException if arg error
*/
public static DtoMetadata create(String schema,
String element,
String qualifier,
String language,
String value)
throws IllegalArgumentException {
if ((qualifier != null) && (qualifier.equals(Item.ANY) || qualifier.equals(""))) {
throw new IllegalArgumentException("Invalid qualifier: " + qualifier);
}
return new DtoMetadata(schema, element, qualifier, language, value);
/**
* Factory method to create metadata object
*
*
* @param compoundForm of the form <schema>.<element>[.<qualifier>]
* @param language null or empty
* @param value value
* @throws ParseException if parse error
* @throws IllegalArgumentException if arg error
*/
public static DtoMetadata create(String compoundForm, String language, String value)
throws ParseException, IllegalArgumentException
{
String[] ar = MetadataUtilities.parseCompoundForm(compoundForm);
String qual = null;
if (ar.length > 2)
{
qual = ar[2];
}
return create(ar[0], ar[1], qual, language, value);
}
/**
* Factory method to create metadata object
*
* @param compoundForm of the form <schema>.<element>[.<qualifier>]
* @param language null or empty
* @param value value
* @throws ParseException if parse error
* @throws IllegalArgumentException if arg error
*/
public static DtoMetadata create(String compoundForm, String language, String value)
throws ParseException, IllegalArgumentException {
String[] ar = MetadataUtilities.parseCompoundForm(compoundForm);
/**
* Determine if this metadata field matches the specified type:
* schema.element or schema.element.qualifier
*
*
* @param compoundForm of the form <schema>.<element>[.<qualifier>|.*]
* @param wildcard allow wildcards in compoundForm param
* @return whether matches
*/
public boolean matches(String compoundForm, boolean wildcard)
{
String[] ar = compoundForm.split("\\s*\\.\\s*"); //MetadataUtilities.parseCompoundForm(compoundForm);
String qual = null;
if (ar.length > 2) {
qual = ar[2];
}
if ((ar.length < 2) || (ar.length > 3))
{
return false;
}
return create(ar[0], ar[1], qual, language, value);
}
if (!this.schema.equals(ar[0]) || !this.element.equals(ar[1]))
{
return false;
}
/**
* Determine if this metadata field matches the specified type:
* schema.element or schema.element.qualifier
*
* @param compoundForm of the form <schema>.<element>[.<qualifier>|.*]
* @param wildcard allow wildcards in compoundForm param
* @return whether matches
*/
public boolean matches(String compoundForm, boolean wildcard) {
String[] ar = compoundForm.split("\\s*\\.\\s*"); //MetadataUtilities.parseCompoundForm(compoundForm);
if (ar.length == 2)
{
if (this.qualifier != null)
{
return false;
}
}
if ((ar.length < 2) || (ar.length > 3)) {
return false;
}
if (ar.length == 3)
{
if (this.qualifier == null)
{
return false;
}
if (wildcard && ar[2].equals(Item.ANY))
{
return true;
}
if (!this.qualifier.equals(ar[2]))
{
return false;
}
}
return true;
}
if (!this.schema.equals(ar[0]) || !this.element.equals(ar[1])) {
return false;
}
if (ar.length == 2) {
if (this.qualifier != null) {
return false;
}
}
if (ar.length == 3) {
if (this.qualifier == null) {
return false;
}
if (wildcard && ar[2].equals(Item.ANY)) {
return true;
}
if (!this.qualifier.equals(ar[2])) {
return false;
}
}
return true;
}
public String toString() {
String s = "\tSchema: " + schema + " Element: " + element;
if (qualifier != null) {
s += " Qualifier: " + qualifier;
}
s += " Language: " + ((language == null) ? "[null]" : language);
public String toString()
{
String s = "\tSchema: " + schema + " Element: " + element;
if (qualifier != null)
{
s+= " Qualifier: " + qualifier;
}
s+= " Language: " + ((language == null) ? "[null]" : language);
s += " Value: " + value;
return s;
}
}
public String getValue() {
return value;
}
public String getValue()
{
return value;
}
}

View File

@@ -10,11 +10,12 @@ package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
@@ -22,13 +23,14 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerConfigurationException;
import org.apache.log4j.Logger;
import org.dspace.app.util.LocalSchemaFilenameFilter;
@@ -38,18 +40,20 @@ import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.w3c.dom.Document;
/**
* Encapsulates the Item in the context of the DSpace Archive Format
* Encapsulates the Item in the context of the DSpace Archive Format
*
*/
public class ItemArchive {
private static final Logger log = Logger.getLogger(ItemArchive.class);
public static final String DUBLIN_CORE_XML = "dublin_core.xml";
public static final String DUBLIN_CORE_XML = "dublin_core.xml";
protected static DocumentBuilder builder = null;
protected Transformer transformer = null;
@@ -66,278 +70,312 @@ public class ItemArchive {
protected HandleService handleService;
protected ItemService itemService;
//constructors
protected ItemArchive() {
//constructors
protected ItemArchive()
{
handleService = HandleServiceFactory.getInstance().getHandleService();
itemService = ContentServiceFactory.getInstance().getItemService();
}
}
/**
* factory method
*
* Minimal requirements for dublin_core.xml for this application
* is the presence of dc.identifier.uri
* which must contain the handle for the item
*
* @param context - The DSpace context
* @param dir - The directory File in the source archive
* @param itemField - The metadata field in which the Item identifier is located
* if null, the default is the handle in the dc.identifier.uri field
* @return ItemArchive object
* @throws Exception if error
*/
public static ItemArchive create(Context context, File dir, String itemField)
throws Exception {
ItemArchive itarch = new ItemArchive();
itarch.dir = dir;
itarch.dirname = dir.getName();
/** factory method
*
* Minimal requirements for dublin_core.xml for this application
* is the presence of dc.identifier.uri
* which must contain the handle for the item
*
* @param context - The DSpace context
* @param dir - The directory File in the source archive
* @param itemField - The metadata field in which the Item identifier is located
* if null, the default is the handle in the dc.identifier.uri field
* @return ItemArchive object
* @throws Exception if error
*
*/
public static ItemArchive create(Context context, File dir, String itemField)
throws Exception
{
ItemArchive itarch = new ItemArchive();
itarch.dir = dir;
itarch.dirname = dir.getName();
InputStream is = null;
try {
try
{
is = new FileInputStream(new File(dir, DUBLIN_CORE_XML));
itarch.dtomList = MetadataUtilities.loadDublinCore(getDocumentBuilder(), is);
//The code to search for local schema files was copied from org.dspace.app.itemimport
// .ItemImportServiceImpl.java
//The code to search for local schema files was copied from org.dspace.app.itemimport.ItemImportServiceImpl.java
File file[] = dir.listFiles(new LocalSchemaFilenameFilter());
for (int i = 0; i < file.length; i++) {
for (int i = 0; i < file.length; i++)
{
is = new FileInputStream(file[i]);
itarch.dtomList.addAll(MetadataUtilities.loadDublinCore(getDocumentBuilder(), is));
}
} finally {
if (is != null) {
}
finally
{
if (is != null)
{
is.close();
}
}
ItemUpdate.pr("Loaded metadata with " + itarch.dtomList.size() + " fields");
ItemUpdate.pr("Loaded metadata with " + itarch.dtomList.size() + " fields");
if (itemField == null) {
itarch.item = itarch.itemFromHandleInput(context); // sets the item instance var and seeds the undo list
} else {
itarch.item = itarch.itemFromMetadataField(context, itemField);
}
if (itemField == null)
{
itarch.item = itarch.itemFromHandleInput(context); // sets the item instance var and seeds the undo list
}
else
{
itarch.item = itarch.itemFromMetadataField(context, itemField);
}
if (itarch.item == null) {
throw new Exception("Item not instantiated: " + itarch.dirname);
}
if (itarch.item == null)
{
throw new Exception("Item not instantiated: " + itarch.dirname);
}
ItemUpdate.prv("item instantiated: " + itarch.item.getHandle());
ItemUpdate.prv("item instantiated: " + itarch.item.getHandle());
return itarch;
}
return itarch;
}
protected static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException {
if (builder == null) {
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
}
return builder;
}
protected static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException
{
if (builder == null)
{
builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
}
return builder;
}
/**
* Getter for Transformer
*
* @return Transformer
* @throws TransformerConfigurationException if config error
*/
protected Transformer getTransformer()
throws TransformerConfigurationException {
if (transformer == null) {
transformer = TransformerFactory.newInstance().newTransformer();
}
return transformer;
}
throws TransformerConfigurationException
{
if (transformer == null)
{
transformer = TransformerFactory.newInstance().newTransformer();
}
return transformer;
}
/**
* Getter for the DSpace item referenced in the archive
*
* @return DSpace item
*/
public Item getItem() {
return item;
}
/**
* Getter for the DSpace item referenced in the archive
* @return DSpace item
*/
public Item getItem()
{
return item;
}
/**
* Getter for directory in archive on disk
*
* @return directory in archive
*/
public File getDirectory() {
return dir;
}
/**
* Getter for directory in archive on disk
* @return directory in archive
*/
public File getDirectory()
{
return dir;
}
/**
* Getter for directory name in archive
*
* @return directory name in archive
*/
public String getDirectoryName() {
return dirname;
}
/**
* Getter for directory name in archive
* @return directory name in archive
*/
public String getDirectoryName()
{
return dirname;
}
/**
* Add metadata field to undo list
*
* @param dtom DtoMetadata (represents metadata field)
*/
public void addUndoMetadataField(DtoMetadata dtom) {
this.undoDtomList.add(dtom);
}
/**
* Add metadata field to undo list
* @param dtom DtoMetadata (represents metadata field)
*/
public void addUndoMetadataField(DtoMetadata dtom)
{
this.undoDtomList.add(dtom);
}
/**
* Getter for list of metadata fields
*
* @return list of metadata fields
*/
public List<DtoMetadata> getMetadataFields() {
return dtomList;
}
/**
* Getter for list of metadata fields
* @return list of metadata fields
*/
public List<DtoMetadata> getMetadataFields()
{
return dtomList;
}
/**
* Add bitstream id to delete contents file
*
* @param bitstreamId bitstream ID
*/
public void addUndoDeleteContents(UUID bitstreamId) {
this.undoAddContents.add(bitstreamId);
}
/**
* Add bitstream id to delete contents file
* @param bitstreamId bitstream ID
*/
public void addUndoDeleteContents(UUID bitstreamId)
{
this.undoAddContents.add(bitstreamId);
}
/**
* Obtain item from DSpace based on handle
* This is the default implementation
* that uses the dc.identifier.uri metadatafield
* that contains the item handle as its value
*
* @param context DSpace Context
* @throws SQLException if database error
* @throws Exception if error
* Obtain item from DSpace based on handle
* This is the default implementation
* that uses the dc.identifier.uri metadatafield
* that contains the item handle as its value
* @param context DSpace Context
* @throws SQLException if database error
* @throws Exception if error
*/
private Item itemFromHandleInput(Context context)
throws SQLException, Exception {
DtoMetadata dtom = getMetadataField("dc.identifier.uri");
if (dtom == null) {
throw new Exception("No dc.identier.uri field found for handle");
}
throws SQLException, Exception
{
DtoMetadata dtom = getMetadataField("dc.identifier.uri");
if (dtom == null)
{
throw new Exception("No dc.identier.uri field found for handle");
}
this.addUndoMetadataField(dtom); //seed the undo list with the uri
this.addUndoMetadataField(dtom); //seed the undo list with the uri
String uri = dtom.value;
String uri = dtom.value;
if (!uri.startsWith(ItemUpdate.HANDLE_PREFIX)) {
throw new Exception("dc.identifier.uri for item " + uri
+ " does not begin with prefix: " + ItemUpdate.HANDLE_PREFIX);
}
if (!uri.startsWith(ItemUpdate.HANDLE_PREFIX))
{
throw new Exception("dc.identifier.uri for item " + uri
+ " does not begin with prefix: " + ItemUpdate.HANDLE_PREFIX);
}
String handle = uri.substring(ItemUpdate.HANDLE_PREFIX.length());
String handle = uri.substring(ItemUpdate.HANDLE_PREFIX.length());
DSpaceObject dso = handleService.resolveToObject(context, handle);
if (dso instanceof Item) {
item = (Item) dso;
} else {
ItemUpdate.pr("Warning: item not instantiated");
throw new IllegalArgumentException("Item " + handle + " not instantiated.");
}
return item;
DSpaceObject dso = handleService.resolveToObject(context, handle);
if (dso instanceof Item)
{
item = (Item) dso;
}
else
{
ItemUpdate.pr("Warning: item not instantiated");
throw new IllegalArgumentException("Item " + handle + " not instantiated.");
}
return item;
}
/**
* Find and instantiate Item from the dublin_core.xml based
* on the specified itemField for the item identifier,
* Find and instantiate Item from the dublin_core.xml based
* on the specified itemField for the item identifier,
*
*
* @param context - the DSpace context
* @param itemField - the compound form of the metadata element <schema>.<element>.<qualifier>
* @throws SQLException if database error
* @throws Exception if error
* @throws Exception if error
*/
private Item itemFromMetadataField(Context context, String itemField)
throws SQLException, AuthorizeException, Exception {
DtoMetadata dtom = getMetadataField(itemField);
throws SQLException, AuthorizeException, Exception
{
DtoMetadata dtom = getMetadataField(itemField);
Item item = null;
Item item = null;
if (dtom == null) {
throw new IllegalArgumentException("No field found for item identifier field: " + itemField);
}
ItemUpdate.prv("Metadata field to match for item: " + dtom.toString());
if (dtom == null)
{
throw new IllegalArgumentException("No field found for item identifier field: " + itemField);
}
ItemUpdate.prv("Metadata field to match for item: " + dtom.toString());
this.addUndoMetadataField(dtom); //seed the undo list with the identifier field
this.addUndoMetadataField(dtom); //seed the undo list with the identifier field
Iterator<Item> itr = itemService
.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
int count = 0;
while (itr.hasNext()) {
item = itr.next();
count++;
}
Iterator<Item> itr = itemService.findByMetadataField(context, dtom.schema, dtom.element, dtom.qualifier, dtom.value);
int count = 0;
while (itr.hasNext())
{
item = itr.next();
count++;
}
ItemUpdate.prv("items matching = " + count);
ItemUpdate.prv("items matching = " + count );
if (count != 1) {
throw new Exception("" + count + " items matching item identifier: " + dtom.value);
}
if (count != 1)
{
throw new Exception ("" + count + " items matching item identifier: " + dtom.value);
}
return item;
return item;
}
/**
* Get DtoMetadata field
*
* @param compoundForm compound form
* @return DtoMetadata field
*/
private DtoMetadata getMetadataField(String compoundForm) {
for (DtoMetadata dtom : dtomList) {
if (dtom.matches(compoundForm, false)) {
return dtom;
}
}
return null;
private DtoMetadata getMetadataField(String compoundForm)
{
for (DtoMetadata dtom : dtomList)
{
if (dtom.matches(compoundForm, false))
{
return dtom;
}
}
return null;
}
/**
* write undo directory and files to Disk in archive format
*
* @param undoDir - the root directory of the undo archive
* @throws IOException if IO error
* @throws ParserConfigurationException if config error
* @throws IOException if IO error
* @throws ParserConfigurationException if config error
* @throws TransformerConfigurationException if transformer config error
* @throws TransformerException if transformer error
* @throws FileNotFoundException if file not found
* @throws TransformerException if transformer error
* @throws FileNotFoundException if file not found
*/
public void writeUndo(File undoDir)
throws IOException, ParserConfigurationException, TransformerConfigurationException,
TransformerException, FileNotFoundException {
// create directory for item
File dir = new File(undoDir, dirname);
if (!dir.exists() && !dir.mkdir()) {
public void writeUndo(File undoDir)
throws IOException, ParserConfigurationException, TransformerConfigurationException,
TransformerException, FileNotFoundException
{
// create directory for item
File dir = new File(undoDir, dirname);
if (!dir.exists() && !dir.mkdir())
{
log.error("Unable to create undo directory");
}
OutputStream out = null;
OutputStream out = null;
try {
try
{
out = new FileOutputStream(new File(dir, "dublin_core.xml"));
Document doc = MetadataUtilities.writeDublinCore(getDocumentBuilder(), undoDtomList);
MetadataUtilities.writeDocument(doc, getTransformer(), out);
// if undo has delete bitstream
if (undoAddContents.size() > 0) {
if (undoAddContents.size() > 0)
{
PrintWriter pw = null;
try {
try
{
File f = new File(dir, ItemUpdate.DELETE_CONTENTS_FILE);
pw = new PrintWriter(new BufferedWriter(new FileWriter(f)));
for (UUID i : undoAddContents) {
for (UUID i : undoAddContents)
{
pw.println(i);
}
} finally {
}
finally
{
pw.close();
}
}
} finally {
if (out != null) {
}
finally
{
if (out != null)
{
out.close();
}
}
}
}
} //end class

View File

@@ -7,27 +7,7 @@
*/
package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -37,69 +17,79 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import java.io.*;
import java.util.*;
/**
* Provides some batch editing capabilities for items in DSpace:
* Metadata fields - Add, Delete
* Bitstreams - Add, Delete
*
* The design has been for compatibility with ItemImporter
* in the use of the DSpace archive format which is used to
* specify changes on a per item basis. The directory names
* to correspond to each item are arbitrary and will only be
* used for logging purposes. The reference to the item is
* from a required dc.identifier with the item handle to be
* included in the dublin_core.xml (or similar metadata) file.
* Provides some batch editing capabilities for items in DSpace:
* Metadata fields - Add, Delete
* Bitstreams - Add, Delete
*
* Any combination of these actions is permitted in a single run of this class
* The order of actions is important when used in combination.
* It is the responsibility of the calling class (here, ItemUpdate)
* to register UpdateAction classes in the order to which they are
* to be performed.
* The design has been for compatibility with ItemImporter
* in the use of the DSpace archive format which is used to
* specify changes on a per item basis. The directory names
* to correspond to each item are arbitrary and will only be
* used for logging purposes. The reference to the item is
* from a required dc.identifier with the item handle to be
* included in the dublin_core.xml (or similar metadata) file.
*
* Any combination of these actions is permitted in a single run of this class
* The order of actions is important when used in combination.
* It is the responsibility of the calling class (here, ItemUpdate)
* to register UpdateAction classes in the order to which they are
* to be performed.
*
*
* It is unfortunate that so much code needs to be borrowed
* from ItemImport as it is not reusable in private methods, etc.
* Some of this has been placed into the MetadataUtilities class
* for possible reuse elsewhere.
* It is unfortunate that so much code needs to be borrowed
* from ItemImport as it is not reusable in private methods, etc.
* Some of this has been placed into the MetadataUtilities class
* for possible reuse elsewhere.
*
*
* @author W. Hays based on a conceptual design by R. Rodgers
*
*/
public class ItemUpdate {
public static final String SUPPRESS_UNDO_FILENAME = "suppress_undo";
public static final String SUPPRESS_UNDO_FILENAME = "suppress_undo";
public static final String CONTENTS_FILE = "contents";
public static final String DELETE_CONTENTS_FILE = "delete_contents";
public static final String CONTENTS_FILE = "contents";
public static final String DELETE_CONTENTS_FILE = "delete_contents";
public static String HANDLE_PREFIX = null;
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static String HANDLE_PREFIX = null;
public static final Map<String, String> filterAliases = new HashMap<String, String>();
public static boolean verbose = false;
public static boolean verbose = false;
protected static final EPersonService epersonService = EPersonServiceFactory.getInstance().getEPersonService();
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static {
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
filterAliases
.put("ORIGINAL_AND_DERIVATIVES", "org.dspace.app.itemupdate.OriginalWithDerivativesBitstreamFilter");
filterAliases.put("TEXT", "org.dspace.app.itemupdate.DerivativeTextBitstreamFilter");
filterAliases.put("THUMBNAIL", "org.dspace.app.itemupdate.ThumbnailBitstreamFilter");
}
static
{
filterAliases.put("ORIGINAL", "org.dspace.app.itemupdate.OriginalBitstreamFilter");
filterAliases.put("ORIGINAL_AND_DERIVATIVES", "org.dspace.app.itemupdate.OriginalWithDerivativesBitstreamFilter");
filterAliases.put("TEXT", "org.dspace.app.itemupdate.DerivativeTextBitstreamFilter");
filterAliases.put("THUMBNAIL", "org.dspace.app.itemupdate.ThumbnailBitstreamFilter");
}
// File listing filter to check for folders
static FilenameFilter directoryFilter = new FilenameFilter() {
static FilenameFilter directoryFilter = new FilenameFilter()
{
@Override
public boolean accept(File dir, String n) {
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return f.isDirectory();
}
};
// File listing filter to check for files (not directories)
static FilenameFilter fileFilter = new FilenameFilter() {
static FilenameFilter fileFilter = new FilenameFilter()
{
@Override
public boolean accept(File dir, String n) {
public boolean accept(File dir, String n)
{
File f = new File(dir.getAbsolutePath() + File.separatorChar + n);
return (f.isFile());
}
@@ -111,40 +101,39 @@ public class ItemUpdate {
protected String eperson;
/**
* @param argv the command line arguments given
*
* @param argv commandline args
*/
public static void main(String[] argv) {
public static void main(String[] argv)
{
// create an options object and populate it
CommandLineParser parser = new PosixParser();
Options options = new Options();
//processing basis for determining items
//processing basis for determining items
//item-specific changes with metadata in source directory with dublin_core.xml files
options.addOption("s", "source", true, "root directory of source dspace archive ");
//actions on items
options.addOption("a", "addmetadata", true,
"add metadata specified for each item; multiples separated by semicolon ';'");
//actions on items
options.addOption("a", "addmetadata", true, "add metadata specified for each item; multiples separated by semicolon ';'");
options.addOption("d", "deletemetadata", true, "delete metadata specified for each item");
options.addOption("A", "addbitstreams", false, "add bitstreams as specified for each item");
// extra work to get optional argument
Option delBitstreamOption = new Option("D", "deletebitstreams", true,
"delete bitstreams as specified for each item");
Option delBitstreamOption = new Option("D", "deletebitstreams", true, "delete bitstreams as specified for each item");
delBitstreamOption.setOptionalArg(true);
delBitstreamOption.setArgName("BitstreamFilter");
options.addOption(delBitstreamOption);
//other params
//other params
options.addOption("e", "eperson", true, "email of eperson doing the update");
options.addOption("i", "itemfield", true,
"optional metadata field that containing item identifier; default is dc.identifier.uri");
options.addOption("i", "itemfield", true, "optional metadata field that containing item identifier; default is dc.identifier.uri");
options.addOption("F", "filter-properties", true, "filter class name; only for deleting bitstream");
options.addOption("v", "verbose", false, "verbose logging");
//special run states
//special run states
options.addOption("t", "test", false, "test run - do not actually import items");
options.addOption("P", "provenance", false, "suppress altering provenance field for bitstream changes");
options.addOption("h", "help", false, "help");
@@ -158,201 +147,236 @@ public class ItemUpdate {
Context context = null;
ItemUpdate iu = new ItemUpdate();
try {
CommandLine line = parser.parse(options, argv);
try
{
CommandLine line = parser.parse(options, argv);
if (line.hasOption('h')) {
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemUpdate", options);
pr("");
pr("Examples:");
pr(" adding metadata: ItemUpdate -e jsmith@mit.edu -s sourcedir -a dc.contributor -a dc.subject ");
pr(" deleting metadata: ItemUpdate -e jsmith@mit.edu -s sourcedir -d dc.description.other");
pr(" adding bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -A -i dc.identifier");
pr(" deleting bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -D ORIGINAL ");
pr("");
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("ItemUpdate", options);
pr("");
pr("Examples:");
pr(" adding metadata: ItemUpdate -e jsmith@mit.edu -s sourcedir -a dc.contributor -a dc.subject ");
pr(" deleting metadata: ItemUpdate -e jsmith@mit.edu -s sourcedir -d dc.description.other");
pr(" adding bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -A -i dc.identifier");
pr(" deleting bitstreams: ItemUpdate -e jsmith@mit.edu -s sourcedir -D ORIGINAL ");
pr("");
System.exit(0);
}
System.exit(0);
}
if (line.hasOption('v')) {
verbose = true;
}
if (line.hasOption('v'))
{
verbose = true;
}
if (line.hasOption('P')) {
alterProvenance = false;
pr("Suppressing changes to Provenance field option");
}
if (line.hasOption('P'))
{
alterProvenance = false;
pr("Suppressing changes to Provenance field option");
}
iu.eperson = line.getOptionValue('e'); // db ID or email
iu.eperson = line.getOptionValue('e'); // db ID or email
if (!line.hasOption('s')) { // item specific changes from archive dir
pr("Missing source archive option");
System.exit(1);
}
String sourcedir = line.getOptionValue('s');
if (!line.hasOption('s')) // item specific changes from archive dir
{
pr("Missing source archive option");
System.exit(1);
}
String sourcedir = line.getOptionValue('s');
if (line.hasOption('t')) { //test
isTest = true;
pr("**Test Run** - not actually updating items.");
if (line.hasOption('t')) //test
{
isTest = true;
pr("**Test Run** - not actually updating items.");
}
}
if (line.hasOption('i')) {
itemField = line.getOptionValue('i');
}
if (line.hasOption('i'))
{
itemField = line.getOptionValue('i');
}
if (line.hasOption('d')) {
String[] targetFields = line.getOptionValues('d');
if (line.hasOption('d'))
{
String[] targetFields = line.getOptionValues('d');
DeleteMetadataAction delMetadataAction = (DeleteMetadataAction) iu.actionMgr
.getUpdateAction(DeleteMetadataAction.class);
delMetadataAction.addTargetFields(targetFields);
DeleteMetadataAction delMetadataAction = (DeleteMetadataAction) iu.actionMgr.getUpdateAction(DeleteMetadataAction.class);
delMetadataAction.addTargetFields(targetFields);
//undo is an add
for (String field : targetFields) {
iu.undoActionList.add(" -a " + field + " ");
}
//undo is an add
for (String field : targetFields)
{
iu.undoActionList.add(" -a " + field + " ");
}
pr("Delete metadata for fields: ");
for (String s : targetFields) {
pr(" " + s);
}
}
pr("Delete metadata for fields: ");
for (String s : targetFields)
{
pr(" " + s);
}
}
if (line.hasOption('a')) {
String[] targetFields = line.getOptionValues('a');
if (line.hasOption('a'))
{
String[] targetFields = line.getOptionValues('a');
AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr
.getUpdateAction(AddMetadataAction.class);
addMetadataAction.addTargetFields(targetFields);
AddMetadataAction addMetadataAction = (AddMetadataAction) iu.actionMgr.getUpdateAction(AddMetadataAction.class);
addMetadataAction.addTargetFields(targetFields);
//undo is a delete followed by an add of a replace record for target fields
for (String field : targetFields) {
iu.undoActionList.add(" -d " + field + " ");
}
//undo is a delete followed by an add of a replace record for target fields
for (String field : targetFields)
{
iu.undoActionList.add(" -d " + field + " ");
}
for (String field : targetFields) {
iu.undoActionList.add(" -a " + field + " ");
}
for (String field : targetFields)
{
iu.undoActionList.add(" -a " + field + " ");
}
pr("Add metadata for fields: ");
for (String s : targetFields) {
pr(" " + s);
}
}
pr("Add metadata for fields: ");
for (String s : targetFields)
{
pr(" " + s);
}
}
if (line.hasOption('D')) { // undo not supported
pr("Delete bitstreams ");
if (line.hasOption('D')) // undo not supported
{
pr("Delete bitstreams ");
String[] filterNames = line.getOptionValues('D');
if ((filterNames != null) && (filterNames.length > 1)) {
pr("Error: Only one filter can be a used at a time.");
System.exit(1);
}
String[] filterNames = line.getOptionValues('D');
if ((filterNames != null) && (filterNames.length > 1))
{
pr("Error: Only one filter can be a used at a time.");
System.exit(1);
}
String filterName = line.getOptionValue('D');
pr("Filter argument: " + filterName);
String filterName = line.getOptionValue('D');
pr("Filter argument: " + filterName);
if (filterName == null) { // indicates using delete_contents files
DeleteBitstreamsAction delAction = (DeleteBitstreamsAction) iu.actionMgr
.getUpdateAction(DeleteBitstreamsAction.class);
delAction.setAlterProvenance(alterProvenance);
} else {
// check if param is on ALIAS list
String filterClassname = filterAliases.get(filterName);
if (filterName == null) // indicates using delete_contents files
{
DeleteBitstreamsAction delAction = (DeleteBitstreamsAction) iu.actionMgr.getUpdateAction(DeleteBitstreamsAction.class);
delAction.setAlterProvenance(alterProvenance);
}
else
{
// check if param is on ALIAS list
String filterClassname = filterAliases.get(filterName);
if (filterClassname == null) {
filterClassname = filterName;
}
if (filterClassname == null)
{
filterClassname = filterName;
}
BitstreamFilter filter = null;
BitstreamFilter filter = null;
try {
Class<?> cfilter = Class.forName(filterClassname);
pr("BitstreamFilter class to instantiate: " + cfilter.toString());
try
{
Class<?> cfilter = Class.forName(filterClassname);
pr("BitstreamFilter class to instantiate: " + cfilter.toString());
filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
} catch (Exception e) {
pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
System.exit(1);
}
filter = (BitstreamFilter) cfilter.newInstance(); //unfortunate cast, an erasure consequence
}
catch(Exception e)
{
pr("Error: Failure instantiating bitstream filter class: " + filterClassname);
System.exit(1);
}
String filterPropertiesName = line.getOptionValue('F');
if (filterPropertiesName != null) { //not always required
try {
// TODO try multiple relative locations, e.g. source dir
if (!filterPropertiesName.startsWith("/")) {
filterPropertiesName = sourcedir + File.separator + filterPropertiesName;
}
String filterPropertiesName = line.getOptionValue('F');
if (filterPropertiesName != null) //not always required
{
try
{
// TODO try multiple relative locations, e.g. source dir
if (!filterPropertiesName.startsWith("/"))
{
filterPropertiesName = sourcedir + File.separator + filterPropertiesName;
}
filter.initProperties(filterPropertiesName);
} catch (Exception e) {
pr("Error: Failure finding properties file for bitstream filter class: " +
filterPropertiesName);
System.exit(1);
}
}
filter.initProperties(filterPropertiesName);
}
catch(Exception e)
{
pr("Error: Failure finding properties file for bitstream filter class: " + filterPropertiesName);
System.exit(1);
}
}
DeleteBitstreamsByFilterAction delAction =
(DeleteBitstreamsByFilterAction) iu.actionMgr
.getUpdateAction(DeleteBitstreamsByFilterAction.class);
delAction.setAlterProvenance(alterProvenance);
delAction.setBitstreamFilter(filter);
//undo not supported
}
}
DeleteBitstreamsByFilterAction delAction =
(DeleteBitstreamsByFilterAction) iu.actionMgr.getUpdateAction(DeleteBitstreamsByFilterAction.class);
delAction.setAlterProvenance(alterProvenance);
delAction.setBitstreamFilter(filter);
//undo not supported
}
}
if (line.hasOption('A')) {
pr("Add bitstreams ");
AddBitstreamsAction addAction = (AddBitstreamsAction) iu.actionMgr
.getUpdateAction(AddBitstreamsAction.class);
addAction.setAlterProvenance(alterProvenance);
if (line.hasOption('A'))
{
pr("Add bitstreams ");
AddBitstreamsAction addAction = (AddBitstreamsAction) iu.actionMgr.getUpdateAction(AddBitstreamsAction.class);
addAction.setAlterProvenance(alterProvenance);
iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required
}
iu.undoActionList.add(" -D "); // delete_contents file will be written, no arg required
}
if (!iu.actionMgr.hasActions()) {
if (!iu.actionMgr.hasActions())
{
pr("Error - an action must be specified");
System.exit(1);
} else {
pr("Actions to be performed: ");
}
else
{
pr("Actions to be performed: ");
for (UpdateAction ua : iu.actionMgr) {
pr(" " + ua.getClass().getName());
}
}
for (UpdateAction ua : iu.actionMgr)
{
pr(" " + ua.getClass().getName());
}
}
pr("ItemUpdate - initializing run on " + (new Date()).toString());
pr("ItemUpdate - initializing run on " + (new Date()).toString());
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0) {
HANDLE_PREFIX = "http://hdl.handle.net/";
}
HANDLE_PREFIX = ConfigurationManager.getProperty("handle.canonical.prefix");
if (HANDLE_PREFIX == null || HANDLE_PREFIX.length() == 0)
{
HANDLE_PREFIX = "http://hdl.handle.net/";
}
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
iu.processArchive(context, sourcedir, itemField, metadataIndexName, alterProvenance, isTest);
context.complete(); // complete all transactions
} catch (Exception e) {
if (context != null && context.isValid()) {
context.complete(); // complete all transactions
}
catch (Exception e)
{
if (context != null && context.isValid())
{
context.abort();
}
e.printStackTrace();
pr(e.toString());
status = 1;
} finally {
context.restoreAuthSystemState();
}
finally {
context.restoreAuthSystemState();
}
if (isTest) {
if (isTest)
{
pr("***End of Test Run***");
} else {
pr("End.");
}
else
{
pr("End.");
}
System.exit(status);
@@ -360,22 +384,23 @@ public class ItemUpdate {
/**
* process an archive
*
* @param context DSpace Context
* @param sourceDirPath source path
* @param itemField item field
* @param context DSpace Context
* @param sourceDirPath source path
* @param itemField item field
* @param metadataIndexName index name
* @param alterProvenance whether to alter provenance
* @param isTest test flag
* @param alterProvenance whether to alter provenance
* @param isTest test flag
* @throws Exception if error
*/
protected void processArchive(Context context, String sourceDirPath, String itemField,
String metadataIndexName, boolean alterProvenance, boolean isTest)
throws Exception {
String metadataIndexName, boolean alterProvenance, boolean isTest)
throws Exception
{
// open and process the source directory
File sourceDir = new File(sourceDirPath);
if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory()) {
if ((sourceDir == null) || !sourceDir.exists() || !sourceDir.isDirectory())
{
pr("Error, cannot open archive source directory " + sourceDirPath);
throw new Exception("error with archive source directory " + sourceDirPath);
}
@@ -386,75 +411,92 @@ public class ItemUpdate {
//Undo is suppressed to prevent undo of undo
boolean suppressUndo = false;
File fSuppressUndo = new File(sourceDir, SUPPRESS_UNDO_FILENAME);
if (fSuppressUndo.exists()) {
suppressUndo = true;
if (fSuppressUndo.exists())
{
suppressUndo = true;
}
File undoDir = null; //sibling directory of source archive
if (!suppressUndo && !isTest) {
undoDir = initUndoArchive(sourceDir);
}
if (!suppressUndo && !isTest)
{
undoDir = initUndoArchive(sourceDir);
}
int itemCount = 0;
int successItemCount = 0;
int itemCount = 0;
int successItemCount = 0;
for (String dirname : dircontents) {
itemCount++;
pr("");
pr("processing item " + dirname);
for (String dirname : dircontents)
{
itemCount++;
pr("");
pr("processing item " + dirname);
try {
ItemArchive itarch = ItemArchive.create(context, new File(sourceDir, dirname), itemField);
try
{
ItemArchive itarch = ItemArchive.create(context, new File(sourceDir, dirname), itemField);
for (UpdateAction action : actionMgr) {
pr("action: " + action.getClass().getName());
action.execute(context, itarch, isTest, suppressUndo);
if (!isTest && !suppressUndo) {
for (UpdateAction action : actionMgr)
{
pr("action: " + action.getClass().getName());
action.execute(context, itarch, isTest, suppressUndo);
if (!isTest && !suppressUndo)
{
itarch.writeUndo(undoDir);
}
}
if (!isTest) {
Item item = itarch.getItem();
}
if (!isTest)
{
Item item = itarch.getItem();
itemService.update(context, item); //need to update before commit
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
} catch (Exception e) {
pr("Exception processing item " + dirname + ": " + e.toString());
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;
}
catch(Exception e)
{
pr("Exception processing item " + dirname + ": " + e.toString());
e.printStackTrace();
}
}
}
if (!suppressUndo && !isTest) {
StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate ");
sb.append(" -e ").append(this.eperson);
sb.append(" -s ").append(undoDir);
if (!suppressUndo && !isTest)
{
StringBuilder sb = new StringBuilder("dsrun org.dspace.app.itemupdate.ItemUpdate ");
sb.append(" -e ").append(this.eperson);
sb.append(" -s ").append(undoDir);
if (itemField != null) {
sb.append(" -i ").append(itemField);
}
if (itemField != null)
{
sb.append(" -i ").append(itemField);
}
if (!alterProvenance) {
sb.append(" -P ");
}
if (isTest) {
sb.append(" -t ");
}
if (!alterProvenance)
{
sb.append(" -P ");
}
if (isTest)
{
sb.append(" -t ");
}
for (String actionOption : undoActionList) {
sb.append(actionOption);
}
for (String actionOption : undoActionList)
{
sb.append(actionOption);
}
PrintWriter pw = null;
try {
File cmdFile = new File(undoDir.getParent(), undoDir.getName() + "_command.sh");
pw = new PrintWriter(new BufferedWriter(new FileWriter(cmdFile)));
pw.println(sb.toString());
} finally {
pw.close();
}
PrintWriter pw = null;
try
{
File cmdFile = new File (undoDir.getParent(), undoDir.getName() + "_command.sh");
pw = new PrintWriter(new BufferedWriter(new FileWriter(cmdFile)));
pw.println(sb.toString());
}
finally
{
pw.close();
}
}
pr("");
@@ -464,75 +506,85 @@ public class ItemUpdate {
/**
*
* to avoid overwriting the undo source tree on repeated processing
* sequence numbers are added and checked
*
* @param sourceDir - the original source directory
* @return the directory of the undo archive
* @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO error
* @throws IOException if IO error
*/
protected File initUndoArchive(File sourceDir)
throws FileNotFoundException, IOException {
File parentDir = sourceDir.getCanonicalFile().getParentFile();
if (parentDir == null) {
throw new FileNotFoundException(
"Parent directory of archive directory not found; unable to write UndoArchive; no processing " +
"performed");
}
throws FileNotFoundException, IOException
{
File parentDir = sourceDir.getCanonicalFile().getParentFile();
if (parentDir == null)
{
throw new FileNotFoundException("Parent directory of archive directory not found; unable to write UndoArchive; no processing performed");
}
String sourceDirName = sourceDir.getName();
int seqNo = 1;
String sourceDirName = sourceDir.getName();
int seqNo = 1;
File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo);
while (undoDir.exists()) {
undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + ++seqNo); //increment
}
File undoDir = new File(parentDir, "undo_" + sourceDirName + "_" + seqNo);
while (undoDir.exists())
{
undoDir = new File(parentDir, "undo_" + sourceDirName+ "_" + ++seqNo); //increment
}
// create root directory
if (!undoDir.mkdir()) {
pr("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
throw new IOException("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
}
// create root directory
if (!undoDir.mkdir())
{
pr("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
throw new IOException("ERROR creating Undo Archive directory " + undoDir.getCanonicalPath());
}
//Undo is suppressed to prevent undo of undo
File fSuppressUndo = new File(undoDir, ItemUpdate.SUPPRESS_UNDO_FILENAME);
try {
fSuppressUndo.createNewFile();
} catch (IOException e) {
pr("ERROR creating Suppress Undo File " + e.toString());
throw e;
try
{
fSuppressUndo.createNewFile();
}
catch(IOException e)
{
pr("ERROR creating Suppress Undo File " + e.toString());
throw e;
}
return undoDir;
}
}
//private void write
//private void write
/**
* Set EPerson doing import
*
* @param context DSpace Context
* @param eperson EPerson obj
* @throws Exception if error
*/
protected void setEPerson(Context context, String eperson)
throws Exception {
if (eperson == null) {
throws Exception
{
if (eperson == null)
{
pr("Error - an eperson to do the importing must be specified");
pr(" (run with -h flag for details)");
throw new Exception("EPerson not specified.");
}
throw new Exception("EPerson not specified."); }
EPerson myEPerson = null;
if (eperson.indexOf('@') != -1) {
if (eperson.indexOf('@') != -1)
{
// @ sign, must be an email
myEPerson = epersonService.findByEmail(context, eperson);
} else {
}
else
{
myEPerson = epersonService.find(context, UUID.fromString(eperson));
}
if (myEPerson == null) {
if (myEPerson == null)
{
pr("Error, eperson cannot be found: " + eperson);
throw new Exception("Invalid EPerson");
}
@@ -541,25 +593,26 @@ public class ItemUpdate {
}
/**
* poor man's logging
* As with ItemImport, API logging goes through log4j to the DSpace.log files
* whereas the batch logging goes to the console to be captured there.
*
* poor man's logging
* As with ItemImport, API logging goes through log4j to the DSpace.log files
* whereas the batch logging goes to the console to be captured there.
* @param s String
*/
static void pr(String s) {
System.out.println(s);
static void pr(String s)
{
System.out.println(s);
}
/**
* print if verbose flag is set
*
* print if verbose flag is set
* @param s String
*/
static void prv(String s) {
if (verbose) {
System.out.println(s);
}
static void prv(String s)
{
if (verbose)
{
System.out.println(s);
}
}
} //end of class

View File

@@ -11,13 +11,14 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Result;
@@ -30,14 +31,10 @@ import javax.xml.transform.stream.StreamResult;
import org.apache.commons.lang.StringUtils;
import org.apache.xpath.XPathAPI;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Item;
import org.dspace.content.MetadataField;
import org.dspace.content.MetadataSchema;
import org.dspace.content.MetadataValue;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
@@ -46,265 +43,311 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import org.dspace.authorize.AuthorizeException;
import org.dspace.core.ConfigurationManager;
/**
* Miscellaneous methods for metadata handling that build on the API
* which might have general utility outside of the specific use
* in context in ItemUpdate.
* Miscellaneous methods for metadata handling that build on the API
* which might have general utility outside of the specific use
* in context in ItemUpdate.
*
* The XML methods were based on those in ItemImport
*
*
* The XML methods were based on those in ItemImport
*/
public class MetadataUtilities {
protected static final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
* Default constructor
*/
private MetadataUtilities() { }
/**
* Working around Item API to delete a value-specific Metadatum
* For a given element/qualifier/lang:
* get all DCValues
* clear (i.e. delete) all of these DCValues
* add them back, minus the one to actually delete
*
* @param context DSpace Context
* @param item Item Object
* @param dtom metadata field
* Working around Item API to delete a value-specific Metadatum
For a given element/qualifier/lang:
get all DCValues
clear (i.e. delete) all of these DCValues
* add them back, minus the one to actually delete
*
* @param context DSpace Context
* @param item Item Object
* @param dtom metadata field
* @param isLanguageStrict whether strict or not
* @return true if metadata field is found with matching value and was deleted
* @throws SQLException if database error
* @return true if metadata field is found with matching value and was deleted
*/
public static boolean deleteMetadataByValue(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict)
throws SQLException {
List<MetadataValue> ar = null;
public static boolean deleteMetadataByValue(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict) throws SQLException {
List<MetadataValue> ar = null;
if (isLanguageStrict) { // get all for given type
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
} else {
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
if (isLanguageStrict)
{ // get all for given type
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
boolean found = false;
boolean found = false;
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (MetadataValue dcv : ar) {
if (dcv.getValue().equals(dtom.value)) {
found = true;
} else {
vals.add(dcv.getValue());
}
}
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (MetadataValue dcv : ar)
{
if (dcv.getValue().equals(dtom.value))
{
found = true;
}
else
{
vals.add(dcv.getValue());
}
}
if (found) { //remove all for given type ??synchronize this block??
if (isLanguageStrict) {
if (found) //remove all for given type ??synchronize this block??
{
if (isLanguageStrict)
{
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
} else {
}
else
{
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
}
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
return found;
}
return found;
}
/**
* Append text to value metadata field to item
* Append text to value metadata field to item
*
* @param context DSpace Context
* @param item DSpace Item
* @param dtom metadata field
* @param context DSpace Context
* @param item DSpace Item
* @param dtom metadata field
* @param isLanguageStrict if strict
* @param textToAppend text to append
* @throws IllegalArgumentException - When target metadata field is not found
* @throws SQLException if database error
* @param textToAppend text to append
* @throws IllegalArgumentException - When target metadata field is not found
* @throws SQLException if database error
*/
public static void appendMetadata(Context context, Item item, DtoMetadata dtom, boolean isLanguageStrict,
String textToAppend)
throws IllegalArgumentException, SQLException {
List<MetadataValue> ar = null;
String textToAppend)
throws IllegalArgumentException, SQLException {
List<MetadataValue> ar = null;
// get all values for given element/qualifier
if (isLanguageStrict) { // get all for given element/qualifier
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
} else {
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
// get all values for given element/qualifier
if (isLanguageStrict) // get all for given element/qualifier
{
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
}
else
{
ar = itemService.getMetadata(item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
if (ar.size() == 0) {
throw new IllegalArgumentException("Metadata to append to not found");
}
if (ar.size() == 0)
{
throw new IllegalArgumentException("Metadata to append to not found");
}
int idx = 0; //index of field to change
if (ar.size() > 1) { //need to pick one, can't be sure it's the last one
// TODO maybe get highest id ?
}
int idx = 0; //index of field to change
if (ar.size() > 1) //need to pick one, can't be sure it's the last one
{
// TODO maybe get highest id ?
}
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (int i = 0; i < ar.size(); i++) {
if (i == idx) {
vals.add(ar.get(i).getValue() + textToAppend);
} else {
vals.add(ar.get(i).getValue());
}
}
//build new set minus the one to delete
List<String> vals = new ArrayList<String>();
for (int i=0; i < ar.size(); i++)
{
if (i == idx)
{
vals.add(ar.get(i).getValue() + textToAppend);
}
else
{
vals.add(ar.get(i).getValue());
}
}
if (isLanguageStrict) {
if (isLanguageStrict)
{
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language);
} else {
}
else
{
itemService.clearMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, Item.ANY);
}
}
itemService.addMetadata(context, item, dtom.schema, dtom.element, dtom.qualifier, dtom.language, vals);
}
/**
* Modification of method from ItemImporter.loadDublinCore
* as a Factory method
* Modification of method from ItemImporter.loadDublinCore
* as a Factory method
*
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @param docBuilder DocumentBuilder
* @param is - InputStream of dublin_core.xml
* @return list of DtoMetadata representing the metadata fields relating to an Item
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws ParserConfigurationException if parser config error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
* @throws SAXException if XML error
* @throws TransformerException if transformer error
* @throws AuthorizeException if authorization error
*/
public static List<DtoMetadata> loadDublinCore(DocumentBuilder docBuilder, InputStream is)
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException {
Document document = docBuilder.parse(is);
throws SQLException, IOException, ParserConfigurationException,
SAXException, TransformerException, AuthorizeException
{
Document document = docBuilder.parse(is);
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
List<DtoMetadata> dtomList = new ArrayList<DtoMetadata>();
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null) {
schema = MetadataSchema.DC_SCHEMA;
} else {
schema = schemaAttr.getNodeValue();
}
// Get the schema, for backward compatibility we will default to the
// dublin core schema if the schema name is not available in the import file
String schema = null;
NodeList metadata = XPathAPI.selectNodeList(document, "/dublin_core");
Node schemaAttr = metadata.item(0).getAttributes().getNamedItem("schema");
if (schemaAttr == null)
{
schema = MetadataSchema.DC_SCHEMA;
}
else
{
schema = schemaAttr.getNodeValue();
}
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
// Get the nodes corresponding to formats
NodeList dcNodes = XPathAPI.selectNodeList(document, "/dublin_core/dcvalue");
for (int i = 0; i < dcNodes.getLength(); i++) {
Node n = dcNodes.item(i);
String value = getStringValue(n).trim();
// compensate for empty value getting read as "null", which won't display
if (value == null) {
value = "";
}
String element = getAttributeValue(n, "element");
if (element != null) {
element = element.trim();
}
String qualifier = getAttributeValue(n, "qualifier");
if (qualifier != null) {
qualifier = qualifier.trim();
}
String language = getAttributeValue(n, "language");
if (language != null) {
language = language.trim();
}
for (int i = 0; i < dcNodes.getLength(); i++)
{
Node n = dcNodes.item(i);
String value = getStringValue(n).trim();
// compensate for empty value getting read as "null", which won't display
if (value == null)
{
value = "";
}
String element = getAttributeValue(n, "element");
if (element != null)
{
element = element.trim();
}
String qualifier = getAttributeValue(n, "qualifier");
if (qualifier != null)
{
qualifier = qualifier.trim();
}
String language = getAttributeValue(n, "language");
if (language != null)
{
language = language.trim();
}
if ("none".equals(qualifier) || "".equals(qualifier)) {
qualifier = null;
}
if ("none".equals(qualifier) || "".equals(qualifier))
{
qualifier = null;
}
// a goofy default, but consistent with DSpace treatment elsewhere
if (language == null) {
language = "en";
} else if ("".equals(language)) {
language = ConfigurationManager.getProperty("default.language");
}
// a goofy default, but consistent with DSpace treatment elsewhere
if (language == null)
{
language = "en";
}
else if ("".equals(language))
{
language = ConfigurationManager.getProperty("default.language");
}
DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
ItemUpdate.pr(dtom.toString());
dtomList.add(dtom);
}
return dtomList;
}
DtoMetadata dtom = DtoMetadata.create(schema, element, qualifier, language, value);
ItemUpdate.pr(dtom.toString());
dtomList.add(dtom);
}
return dtomList;
}
/**
* Write dublin_core.xml
* Write dublin_core.xml
*
* @param docBuilder DocumentBuilder
* @param dtomList List of metadata fields
* @param dtomList List of metadata fields
* @return xml document
* @throws ParserConfigurationException if parser config error
* @throws ParserConfigurationException if parser config error
* @throws TransformerConfigurationException if transformer config error
* @throws TransformerException if transformer error
* @throws TransformerException if transformer error
*/
public static Document writeDublinCore(DocumentBuilder docBuilder, List<DtoMetadata> dtomList)
throws ParserConfigurationException, TransformerConfigurationException, TransformerException {
public static Document writeDublinCore(DocumentBuilder docBuilder, List<DtoMetadata> dtomList)
throws ParserConfigurationException, TransformerConfigurationException, TransformerException
{
Document doc = docBuilder.newDocument();
Element root = doc.createElement("dublin_core");
doc.appendChild(root);
for (DtoMetadata dtom : dtomList) {
Element mel = doc.createElement("dcvalue");
mel.setAttribute("element", dtom.element);
if (dtom.qualifier == null) {
mel.setAttribute("qualifier", "none");
} else {
mel.setAttribute("qualifier", dtom.qualifier);
}
for (DtoMetadata dtom : dtomList)
{
Element mel = doc.createElement("dcvalue");
mel.setAttribute("element", dtom.element);
if (dtom.qualifier == null)
{
mel.setAttribute("qualifier", "none");
}
else
{
mel.setAttribute("qualifier", dtom.qualifier);
}
if (StringUtils.isEmpty(dtom.language)) {
mel.setAttribute("language", "en");
} else {
mel.setAttribute("language", dtom.language);
}
mel.setTextContent(dtom.value);
root.appendChild(mel);
if (StringUtils.isEmpty(dtom.language))
{
mel.setAttribute("language", "en");
}
else
{
mel.setAttribute("language", dtom.language);
}
mel.setTextContent(dtom.value);
root.appendChild(mel);
}
return doc;
}
}
/**
* write xml document to output stream
*
* @param doc XML Document
* write xml document to output stream
* @param doc XML Document
* @param transformer XML Transformer
* @param out OutputStream
* @throws IOException if IO Error
* @param out OutputStream
* @throws IOException if IO Error
* @throws TransformerException if Transformer error
*/
public static void writeDocument(Document doc, Transformer transformer, OutputStream out)
throws IOException, TransformerException {
public static void writeDocument(Document doc, Transformer transformer, OutputStream out)
throws IOException, TransformerException
{
Source src = new DOMSource(doc);
Result dest = new StreamResult(out);
transformer.transform(src, dest);
}
}
// XML utility methods
/**
* Lookup an attribute from a DOM node.
*
* @param n Node
* @param n Node
* @param name name
* @return attribute value
*/
private static String getAttributeValue(Node n, String name) {
private static String getAttributeValue(Node n, String name)
{
NamedNodeMap nm = n.getAttributes();
for (int i = 0; i < nm.getLength(); i++) {
for (int i = 0; i < nm.getLength(); i++)
{
Node node = nm.item(i);
if (name.equals(node.getNodeName())) {
if (name.equals(node.getNodeName()))
{
return node.getNodeValue();
}
}
@@ -314,17 +357,19 @@ public class MetadataUtilities {
/**
* Return the String value of a Node.
*
* @param node node
* @return string value
*/
private static String getStringValue(Node node) {
private static String getStringValue(Node node)
{
String value = node.getNodeValue();
if (node.hasChildNodes()) {
if (node.hasChildNodes())
{
Node first = node.getFirstChild();
if (first.getNodeType() == Node.TEXT_NODE) {
if (first.getNodeType() == Node.TEXT_NODE)
{
return first.getNodeValue();
}
}
@@ -339,127 +384,151 @@ public class MetadataUtilities {
* @param f file
* @return list of ContentsEntry
* @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO error
* @throws ParseException if parse error
* @throws IOException if IO error
* @throws ParseException if parse error
*/
public static List<ContentsEntry> readContentsFile(File f)
throws FileNotFoundException, IOException, ParseException {
List<ContentsEntry> list = new ArrayList<ContentsEntry>();
throws FileNotFoundException, IOException, ParseException
{
List<ContentsEntry> list = new ArrayList<ContentsEntry>();
BufferedReader in = null;
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(f));
String line = null;
try
{
in = new BufferedReader(new FileReader(f));
String line = null;
while ((line = in.readLine()) != null) {
line = line.trim();
if ("".equals(line)) {
continue;
}
ItemUpdate.pr("Contents entry: " + line);
list.add(ContentsEntry.parse(line));
}
} finally {
try {
in.close();
} catch (IOException e) {
//skip
}
}
while ((line = in.readLine()) != null)
{
line = line.trim();
if ("".equals(line))
{
continue;
}
ItemUpdate.pr("Contents entry: " + line);
list.add(ContentsEntry.parse(line));
}
}
finally
{
try
{
in.close();
}
catch(IOException e)
{
//skip
}
}
return list;
return list;
}
/**
*
* @param f file
* @return list of lines as strings
* @throws FileNotFoundException if file doesn't exist
* @throws IOException if IO Error
* @throws IOException if IO Error
*/
public static List<String> readDeleteContentsFile(File f)
throws FileNotFoundException, IOException {
List<String> list = new ArrayList<>();
throws FileNotFoundException, IOException
{
List<String> list = new ArrayList<>();
BufferedReader in = null;
BufferedReader in = null;
try {
in = new BufferedReader(new FileReader(f));
String line = null;
try
{
in = new BufferedReader(new FileReader(f));
String line = null;
while ((line = in.readLine()) != null) {
line = line.trim();
if ("".equals(line)) {
continue;
}
while ((line = in.readLine()) != null)
{
line = line.trim();
if ("".equals(line))
{
continue;
}
list.add(line);
}
} finally {
try {
in.close();
} catch (IOException e) {
//skip
}
}
}
}
finally
{
try
{
in.close();
}
catch(IOException e)
{
//skip
}
}
return list;
return list;
}
/**
* Get display of Metadatum
*
* Get display of Metadatum
*
* @param dcv MetadataValue
* @return string displaying elements of the Metadatum
*/
public static String getDCValueString(MetadataValue dcv) {
public static String getDCValueString(MetadataValue dcv)
{
MetadataField metadataField = dcv.getMetadataField();
MetadataSchema metadataSchema = metadataField.getMetadataSchema();
return "schema: " + metadataSchema.getName() + "; element: " + metadataField
.getElement() + "; qualifier: " + metadataField.getQualifier() +
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue();
return "schema: " + metadataSchema.getName() + "; element: " + metadataField.getElement() + "; qualifier: " + metadataField.getQualifier() +
"; language: " + dcv.getLanguage() + "; value: " + dcv.getValue();
}
/**
* Return compound form of a metadata field (i.e. schema.element.qualifier)
*
* @param schema schema
* @param element element
* @param qualifier qualifier
* @return a String representation of the two- or three-part form of a metadata element
* e.g. dc.identifier.uri
*/
public static String getCompoundForm(String schema, String element, String qualifier) {
StringBuilder sb = new StringBuilder();
sb.append(schema).append(".").append(element);
/**
* Return compound form of a metadata field (i.e. schema.element.qualifier)
* @param schema schema
* @param element element
* @param qualifier qualifier
* @return a String representation of the two- or three-part form of a metadata element
* e.g. dc.identifier.uri
*/
public static String getCompoundForm(String schema, String element, String qualifier)
{
StringBuilder sb = new StringBuilder();
sb.append(schema).append(".").append(element);
if (qualifier != null) {
sb.append(".").append(qualifier);
}
return sb.toString();
}
if (qualifier != null)
{
sb.append(".").append(qualifier);
}
return sb.toString();
}
/**
* Parses metadata field given in the form {@code <schema>.<element>[.<qualifier>|.*]}
* checks for correct number of elements (2 or 3) and for empty strings
*
* @param compoundForm compound form of metadata field
* @return String Array
* @throws ParseException if validity checks fail
*/
public static String[] parseCompoundForm(String compoundForm)
throws ParseException {
String[] ar = compoundForm.split("\\s*\\.\\s*"); //trim ends
/**
* Parses metadata field given in the form {@code <schema>.<element>[.<qualifier>|.*]}
* checks for correct number of elements (2 or 3) and for empty strings
*
* @param compoundForm compound form of metadata field
* @return String Array
* @throws ParseException if validity checks fail
*
*/
public static String[] parseCompoundForm(String compoundForm)
throws ParseException
{
String[] ar = compoundForm.split("\\s*\\.\\s*"); //trim ends
if ("".equals(ar[0])) {
throw new ParseException("schema is empty string: " + compoundForm, 0);
}
if ("".equals(ar[0]))
{
throw new ParseException("schema is empty string: " + compoundForm, 0);
}
if ((ar.length < 2) || (ar.length > 3) || "".equals(ar[1])) {
throw new ParseException("element is malformed or empty string: " + compoundForm, 0);
}
if ((ar.length < 2) || (ar.length > 3) || "".equals(ar[1]))
{
throw new ParseException("element is malformed or empty string: " + compoundForm, 0);
}
return ar;
}
return ar;
}
}

View File

@@ -14,36 +14,44 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
/**
* Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles
* Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles
*/
public class OriginalBitstreamFilter extends BitstreamFilterByBundleName {
public OriginalBitstreamFilter() {
//empty
}
public class OriginalBitstreamFilter extends BitstreamFilterByBundleName
{
public OriginalBitstreamFilter()
{
//empty
}
/**
* Tests bitstreams for containment in an ORIGINAL bundle
*
* @param bitstream Bitstream
* @return true if the bitstream is in the ORIGINAL bundle
* @throws BitstreamFilterException if filter error
*/
@Override
/**
* Tests bitstreams for containment in an ORIGINAL bundle
* @param bitstream Bitstream
* @return true if the bitstream is in the ORIGINAL bundle
*
* @throws BitstreamFilterException if filter error
*/
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException {
try {
List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
if (bundle.getName().equals("ORIGINAL")) {
return true;
}
}
} catch (SQLException e) {
throw new BitstreamFilterException(e);
}
return false;
}
throws BitstreamFilterException
{
try
{
List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles)
{
if (bundle.getName().equals("ORIGINAL"))
{
return true;
}
}
}
catch(SQLException e)
{
throw new BitstreamFilterException(e);
}
return false;
}
}

View File

@@ -14,40 +14,49 @@ import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
/**
* Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles
* Filter all bitstreams in the ORIGINAL bundle
* Also delete all derivative bitstreams, i.e.
* all bitstreams in the TEXT and THUMBNAIL bundles
*/
public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter {
protected String[] bundlesToEmpty = {"ORIGINAL", "TEXT", "THUMBNAIL"};
public class OriginalWithDerivativesBitstreamFilter extends BitstreamFilter
{
protected String[] bundlesToEmpty = { "ORIGINAL", "TEXT", "THUMBNAIL" };
public OriginalWithDerivativesBitstreamFilter() {
//empty
}
public OriginalWithDerivativesBitstreamFilter()
{
//empty
}
/**
* Tests bitstream for membership in specified bundles (ORIGINAL, TEXT, THUMBNAIL)
*
* @param bitstream Bitstream
* @return true if bitstream is in specified bundles
* @throws BitstreamFilterException if error
*/
@Override
/**
* Tests bitstream for membership in specified bundles (ORIGINAL, TEXT, THUMBNAIL)
*
* @param bitstream Bitstream
* @throws BitstreamFilterException if error
* @return true if bitstream is in specified bundles
*/
@Override
public boolean accept(Bitstream bitstream)
throws BitstreamFilterException {
try {
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles) {
for (String bn : bundlesToEmpty) {
if (b.getName().equals(bn)) {
return true;
}
}
}
} catch (SQLException e) {
throw new BitstreamFilterException(e);
}
return false;
}
throws BitstreamFilterException
{
try
{
List<Bundle> bundles = bitstream.getBundles();
for (Bundle b : bundles)
{
for (String bn : bundlesToEmpty)
{
if (b.getName().equals(bn))
{
return true;
}
}
}
}
catch(SQLException e)
{
throw new BitstreamFilterException(e);
}
return false;
}
}

View File

@@ -10,13 +10,15 @@ package org.dspace.app.itemupdate;
import java.util.Properties;
/**
* Bitstream filter targetting the THUMBNAIL bundle
* Bitstream filter targetting the THUMBNAIL bundle
*
*/
public class ThumbnailBitstreamFilter extends BitstreamFilterByBundleName {
public ThumbnailBitstreamFilter() {
props = new Properties();
props.setProperty("bundle", "THUMBNAIL");
}
public ThumbnailBitstreamFilter()
{
props = new Properties();
props.setProperty("bundle", "THUMBNAIL");
}
}

View File

@@ -12,22 +12,24 @@ import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
/**
* Interface for actions to update an item
* Interface for actions to update an item
*
*/
public interface UpdateAction {
public interface UpdateAction
{
public ItemService itemService = ContentServiceFactory.getInstance().getItemService();
/**
* Action to update item
*
* @param context DSpace context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws Exception if error
*/
public void execute(Context context, ItemArchive itarch, boolean isTest, boolean suppressUndo)
throws Exception;
/**
* Action to update item
*
* @param context DSpace context
* @param itarch item archive
* @param isTest test flag
* @param suppressUndo undo flag
* @throws Exception if error
*/
public void execute(Context context, ItemArchive itarch, boolean isTest, boolean suppressUndo)
throws Exception;
}

View File

@@ -12,32 +12,36 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
/**
* Base class for Bitstream actions
* Base class for Bitstream actions
*
*
*/
public abstract class UpdateBitstreamsAction implements UpdateAction {
protected boolean alterProvenance = true;
protected boolean alterProvenance = true;
protected BundleService bundleService = ContentServiceFactory.getInstance().getBundleService();
protected BitstreamService bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
/**
* Set variable to indicate that the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate
*
* @param alterProvenance whether to alter provenance
*/
public void setAlterProvenance(boolean alterProvenance) {
this.alterProvenance = alterProvenance;
}
/**
* Set variable to indicate that the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate
* @param alterProvenance whether to alter provenance
*/
public void setAlterProvenance(boolean alterProvenance)
{
this.alterProvenance = alterProvenance;
}
/**
* @return boolean value to indicate whether the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate
*/
public boolean getAlterProvenance() {
return alterProvenance;
}
/**
*
* @return boolean value to indicate whether the dc.description.provenance field may
* be changed as a result of Bitstream changes by ItemUpdate
*/
public boolean getAlterProvenance()
{
return alterProvenance;
}
}

View File

@@ -11,57 +11,60 @@ import java.util.HashSet;
import java.util.Set;
/**
* This abstract subclass for metadata actions
* maintains a collection for the target metadata fields
* expressed as a string in the compound notation ( {@code <schema>.<element>.<qualifier>} )
* on which to apply the action when the method execute is called.
* This abstract subclass for metadata actions
* maintains a collection for the target metadata fields
* expressed as a string in the compound notation ( {@code <schema>.<element>.<qualifier>} )
* on which to apply the action when the method execute is called.
*
* Implemented as a Set to avoid problems with duplicates
*
*
* Implemented as a Set to avoid problems with duplicates
*/
public abstract class UpdateMetadataAction implements UpdateAction {
protected Set<String> targetFields = new HashSet<String>();
protected Set<String> targetFields = new HashSet<String>();
/**
* Get target fields
* Get target fields
*
* @return set of fields to update
*/
public Set<String> getTargetFields() {
return targetFields;
}
public Set<String> getTargetFields() {
return targetFields;
}
/**
* Set target fields
*
* @param targetFields Set of target fields to update
*/
public void addTargetFields(Set<String> targetFields) {
for (String tf : targetFields) {
this.targetFields.add(tf);
}
/**
* Set target fields
*
* @param targetFields Set of target fields to update
*/
public void addTargetFields(Set<String> targetFields) {
for (String tf : targetFields)
{
this.targetFields.add(tf);
}
}
}
/**
* Add array of target fields to update
*
* @param targetFields array of target fields to update
*/
public void addTargetFields(String[] targetFields) {
for (String tf : targetFields) {
this.targetFields.add(tf);
}
/**
* Add array of target fields to update
* @param targetFields array of target fields to update
*/
public void addTargetFields(String[] targetFields) {
for (String tf : targetFields)
{
this.targetFields.add(tf);
}
}
}
/**
* Add single field to update
*
* @param targetField target field to update
*/
public void addTargetField(String targetField) {
this.targetFields.add(targetField);
}
/**
* Add single field to update
*
* @param targetField target field to update
*/
public void addTargetField(String targetField) {
this.targetFields.add(targetField);
}
}

View File

@@ -15,29 +15,29 @@ import java.io.Reader;
import java.io.StreamTokenizer;
import java.util.ArrayList;
import java.util.List;
import org.jdom.Document;
/**
*
* @author mwood
*/
public class CommandRunner {
public class CommandRunner
{
/**
* Default constructor
*/
private CommandRunner() { }
/**
* @param args the command line arguments given
* @throws IOException if IO error
*
* @param args commandline args
* @throws IOException if IO error
* @throws FileNotFoundException if file doesn't exist
*/
public static void main(String[] args)
throws FileNotFoundException, IOException {
if (args.length > 0) {
throws FileNotFoundException, IOException
{
if (args.length > 0)
{
runManyCommands(args[0]);
} else {
}
else
{
runManyCommands("-");
}
// There is no sensible way to use the status returned by runManyCommands().
@@ -54,15 +54,19 @@ public class CommandRunner {
*
* @param script the file of command lines to be executed.
* @return status code
* @throws IOException if IO error
* @throws IOException if IO error
* @throws FileNotFoundException if file doesn't exist
*/
static int runManyCommands(String script)
throws FileNotFoundException, IOException {
throws FileNotFoundException, IOException
{
Reader input;
if ("-".equals(script)) {
if ("-".equals(script))
{
input = new InputStreamReader(System.in);
} else {
}
else
{
input = new FileReader(script);
}
@@ -85,16 +89,22 @@ public class CommandRunner {
int status = 0;
List<String> tokens = new ArrayList<String>();
Document commandConfigs = ScriptLauncher.getConfig();
while (StreamTokenizer.TT_EOF != tokenizer.nextToken()) {
if (StreamTokenizer.TT_EOL == tokenizer.ttype) {
if (tokens.size() > 0) {
while (StreamTokenizer.TT_EOF != tokenizer.nextToken())
{
if (StreamTokenizer.TT_EOL == tokenizer.ttype)
{
if (tokens.size() > 0)
{
status = ScriptLauncher.runOneCommand(commandConfigs, tokens.toArray(new String[tokens.size()]));
if (status > 0) {
if (status > 0)
{
break;
}
tokens.clear();
}
} else {
}
else
{
tokens.add(tokenizer.sval);
}
}

View File

@@ -12,7 +12,6 @@ import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.TreeMap;
import org.dspace.servicemanager.DSpaceKernelImpl;
import org.dspace.servicemanager.DSpaceKernelInit;
import org.dspace.services.RequestService;
@@ -26,37 +25,38 @@ import org.jdom.input.SAXBuilder;
* @author Stuart Lewis
* @author Mark Diggory
*/
public class ScriptLauncher {
/**
* The service manager kernel
*/
public class ScriptLauncher
{
/** The service manager kernel */
private static transient DSpaceKernelImpl kernelImpl;
/**
* Default constructor
*/
private ScriptLauncher() { }
/**
* Execute the DSpace script launcher
*
* @param args Any parameters required to be passed to the scripts it executes
* @throws IOException if IO error
* @throws IOException if IO error
* @throws FileNotFoundException if file doesn't exist
*/
public static void main(String[] args)
throws FileNotFoundException, IOException {
throws FileNotFoundException, IOException
{
// Initialise the service manager kernel
try {
try
{
kernelImpl = DSpaceKernelInit.getKernel(null);
if (!kernelImpl.isRunning()) {
if (!kernelImpl.isRunning())
{
kernelImpl.start();
}
} catch (Exception e) {
} catch (Exception e)
{
// Failed to start so destroy it and log and throw an exception
try {
try
{
kernelImpl.destroy();
} catch (Exception e1) {
}
catch (Exception e1)
{
// Nothing to do
}
String message = "Failure during kernel init: " + e.getMessage();
@@ -69,7 +69,8 @@ public class ScriptLauncher {
Document commandConfigs = getConfig();
// Check that there is at least one argument (if not display command options)
if (args.length < 1) {
if (args.length < 1)
{
System.err.println("You must provide at least one command argument");
display(commandConfigs);
System.exit(1);
@@ -80,7 +81,8 @@ public class ScriptLauncher {
status = runOneCommand(commandConfigs, args);
// Destroy the service kernel if it is still alive
if (kernelImpl != null) {
if (kernelImpl != null)
{
kernelImpl.destroy();
kernelImpl = null;
}
@@ -88,29 +90,28 @@ public class ScriptLauncher {
System.exit(status);
}
protected static int runOneCommand(Document commandConfigs, String[] args) {
return runOneCommand(commandConfigs, args, kernelImpl);
}
/**
* Recognize and execute a single command.
*
* @param commandConfigs Document
* @param args the command line arguments given
* @param doc Document
* @param args arguments
*/
public static int runOneCommand(Document commandConfigs, String[] args, DSpaceKernelImpl kernelImpl) {
static int runOneCommand(Document commandConfigs, String[] args)
{
String request = args[0];
Element root = commandConfigs.getRootElement();
List<Element> commands = root.getChildren("command");
Element command = null;
for (Element candidate : commands) {
if (request.equalsIgnoreCase(candidate.getChild("name").getValue())) {
for (Element candidate : commands)
{
if (request.equalsIgnoreCase(candidate.getChild("name").getValue()))
{
command = candidate;
break;
}
}
if (null == command) {
if (null == command)
{
// The command wasn't found
System.err.println("Command not found: " + args[0]);
display(commandConfigs);
@@ -119,26 +120,33 @@ public class ScriptLauncher {
// Run each step
List<Element> steps = command.getChildren("step");
for (Element step : steps) {
for (Element step : steps)
{
// Instantiate the class
Class target = null;
// Is it the special case 'dsrun' where the user provides the class name?
String className;
if ("dsrun".equals(request)) {
if (args.length < 2) {
if ("dsrun".equals(request))
{
if (args.length < 2)
{
System.err.println("Error in launcher.xml: Missing class name");
return 1;
}
className = args[1];
} else {
}
else {
className = step.getChild("class").getValue();
}
try {
try
{
target = Class.forName(className,
true,
Thread.currentThread().getContextClassLoader());
} catch (ClassNotFoundException e) {
}
catch (ClassNotFoundException e)
{
System.err.println("Error in launcher.xml: Invalid class name: " + className);
return 1;
}
@@ -149,20 +157,26 @@ public class ScriptLauncher {
Class[] argTypes = {useargs.getClass()};
boolean passargs = true;
if ((step.getAttribute("passuserargs") != null) &&
("false".equalsIgnoreCase(step.getAttribute("passuserargs").getValue()))) {
("false".equalsIgnoreCase(step.getAttribute("passuserargs").getValue())))
{
passargs = false;
}
if ((args.length == 1) || (("dsrun".equals(request)) && (args.length == 2)) || (!passargs)) {
if ((args.length == 1) || (("dsrun".equals(request)) && (args.length == 2)) || (!passargs))
{
useargs = new String[0];
} else {
}
else
{
// The number of arguments to ignore
// If dsrun is the command, ignore the next, as it is the class name not an arg
int x = 1;
if ("dsrun".equals(request)) {
if ("dsrun".equals(request))
{
x = 2;
}
String[] argsnew = new String[useargs.length - x];
for (int i = x; i < useargs.length; i++) {
for (int i = x; i < useargs.length; i++)
{
argsnew[i - x] = useargs[i];
}
useargs = argsnew;
@@ -170,13 +184,16 @@ public class ScriptLauncher {
// Add any extra properties
List<Element> bits = step.getChildren("argument");
if (step.getChild("argument") != null) {
if (step.getChild("argument") != null)
{
String[] argsnew = new String[useargs.length + bits.size()];
int i = 0;
for (Element arg : bits) {
for (Element arg : bits)
{
argsnew[i++] = arg.getValue();
}
for (; i < bits.size() + useargs.length; i++) {
for (; i < bits.size() + useargs.length; i++)
{
argsnew[i] = useargs[i - bits.size()];
}
useargs = argsnew;
@@ -184,10 +201,11 @@ public class ScriptLauncher {
// Establish the request service startup
RequestService requestService = kernelImpl.getServiceManager().getServiceByName(
RequestService.class.getName(), RequestService.class);
if (requestService == null) {
RequestService.class.getName(), RequestService.class);
if (requestService == null)
{
throw new IllegalStateException(
"Could not get the DSpace RequestService to start the request transaction");
"Could not get the DSpace RequestService to start the request transaction");
}
// Establish a request related to the current session
@@ -195,23 +213,26 @@ public class ScriptLauncher {
requestService.startRequest();
// Run the main() method
try {
try
{
Object[] arguments = {useargs};
// Useful for debugging, so left in the code...
/**System.out.print("About to execute: " + className);
for (String param : useargs)
{
System.out.print(" " + param);
}
System.out.println("");**/
for (String param : useargs)
{
System.out.print(" " + param);
}
System.out.println("");**/
Method main = target.getMethod("main", argTypes);
main.invoke(null, arguments);
// ensure we close out the request (happy request)
requestService.endRequest(null);
} catch (Exception e) {
}
catch (Exception e)
{
// Failure occurred in the request so we destroy it
requestService.endRequest(e);
@@ -232,20 +253,20 @@ public class ScriptLauncher {
*
* @return The XML configuration file Document
*/
protected static Document getConfig() {
return getConfig(kernelImpl);
}
public static Document getConfig(DSpaceKernelImpl kernelImpl) {
protected static Document getConfig()
{
// Load the launcher configuration file
String config = kernelImpl.getConfigurationService().getProperty("dspace.dir") +
System.getProperty("file.separator") + "config" +
System.getProperty("file.separator") + "launcher.xml";
System.getProperty("file.separator") + "config" +
System.getProperty("file.separator") + "launcher.xml";
SAXBuilder saxBuilder = new SAXBuilder();
Document doc = null;
try {
try
{
doc = saxBuilder.build(config);
} catch (Exception e) {
}
catch (Exception e)
{
System.err.println("Unable to load the launcher configuration file: [dspace]/config/launcher.xml");
System.err.println(e.getMessage());
e.printStackTrace();
@@ -256,10 +277,10 @@ public class ScriptLauncher {
/**
* Display the commands that the current launcher config file knows about
*
* @param commandConfigs configs as Document
*/
private static void display(Document commandConfigs) {
private static void display(Document commandConfigs)
{
// List all command elements
List<Element> commands = commandConfigs.getRootElement().getChildren("command");
@@ -267,15 +288,17 @@ public class ScriptLauncher {
// We cannot just use commands.sort() because it tries to remove and
// reinsert Elements within other Elements, and that doesn't work.
TreeMap<String, Element> sortedCommands = new TreeMap<>();
for (Element command : commands) {
for (Element command : commands)
{
sortedCommands.put(command.getChild("name").getValue(), command);
}
// Display the sorted list
System.out.println("Usage: dspace [command-name] {parameters}");
for (Element command : sortedCommands.values()) {
for (Element command : sortedCommands.values())
{
System.out.println(" - " + command.getChild("name").getValue() +
": " + command.getChild("description").getValue());
": " + command.getChild("description").getValue());
}
}
}

View File

@@ -7,12 +7,12 @@
*/
package org.dspace.app.mediafilter;
import java.awt.image.BufferedImage;
import java.awt.Color;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.image.BufferedImage;
/**
* Class to attach a footer to an image using ImageMagick.
@@ -20,117 +20,143 @@ import java.awt.image.BufferedImage;
* This version of the code is basically Ninh's but reorganised a little. Used with permission.
*/
public class Brand {
private int brandWidth;
private int brandHeight;
private Font font;
private int xOffset;
public class Brand
{
private int brandWidth;
private int brandHeight;
private Font font;
private int xOffset;
/**
* Constructor to set up footer image attributes.
*
* @param brandWidth length of the footer in pixels
* @param brandHeight height of the footer in pixels
* @param font font to use for text on the footer
* @param xOffset number of pixels text should be indented from left-hand side of footer
*/
public Brand(int brandWidth,
int brandHeight,
Font font,
int xOffset) {
this.brandWidth = brandWidth;
this.brandHeight = brandHeight;
this.font = font;
this.xOffset = xOffset;
}
/**
* Constructor to set up footer image attributes.
*
* @param brandWidth length of the footer in pixels
* @param brandHeight height of the footer in pixels
* @param font font to use for text on the footer
* @param xOffset number of pixels text should be indented from left-hand side of footer
*
*/
public Brand(int brandWidth,
int brandHeight,
Font font,
int xOffset)
{
this.brandWidth = brandWidth;
this.brandHeight = brandHeight;
this.font = font;
this.xOffset = xOffset;
}
/**
* Create the brand image
*
* @param brandLeftText text that should appear in the bottom left of the image
* @param shortLeftText abbreviated form of brandLeftText that will be substituted if
* the image is resized such that brandLeftText will not fit. <code>null</code> if not
* required
* @param brandRightText text that should appear in the bottom right of the image
* @return BufferedImage a BufferedImage object describing the brand image file
*/
public BufferedImage create(String brandLeftText,
String shortLeftText,
String brandRightText) {
BrandText[] allBrandText = null;
/**
* Create the brand image
*
* @param brandLeftText text that should appear in the bottom left of the image
* @param shortLeftText abbreviated form of brandLeftText that will be substituted if
* the image is resized such that brandLeftText will not fit. <code>null</code> if not
* required
* @param brandRightText text that should appear in the bottom right of the image
*
* @return BufferedImage a BufferedImage object describing the brand image file
*/
public BufferedImage create(String brandLeftText,
String shortLeftText,
String brandRightText)
{
BrandText[] allBrandText = null;
BufferedImage brandImage =
new BufferedImage(brandWidth, brandHeight, BufferedImage.TYPE_INT_RGB);
BufferedImage brandImage =
new BufferedImage(brandWidth, brandHeight, BufferedImage.TYPE_INT_RGB);
if (brandWidth >= 350) {
allBrandText = new BrandText[] {
new BrandText(BrandText.BL, brandLeftText),
new BrandText(BrandText.BR, brandRightText)
};
} else if (brandWidth >= 190) {
allBrandText = new BrandText[] {
new BrandText(BrandText.BL, shortLeftText),
new BrandText(BrandText.BR, brandRightText)
};
} else {
allBrandText = new BrandText[] {
new BrandText(BrandText.BR, brandRightText)
};
}
if (brandWidth >= 350)
{
allBrandText = new BrandText[]
{
new BrandText(BrandText.BL, brandLeftText),
new BrandText(BrandText.BR, brandRightText)
};
}
else if (brandWidth >= 190)
{
allBrandText = new BrandText[]
{
new BrandText(BrandText.BL, shortLeftText),
new BrandText(BrandText.BR, brandRightText)
};
}
else
{
allBrandText = new BrandText[]
{
new BrandText(BrandText.BR, brandRightText)
};
}
if (allBrandText != null && allBrandText.length > 0) {
for (int i = 0; i < allBrandText.length; ++i) {
drawImage(brandImage, allBrandText[i]);
}
}
if (allBrandText != null && allBrandText.length > 0)
{
for (int i = 0; i < allBrandText.length; ++i)
{
drawImage(brandImage, allBrandText[i]);
}
}
return brandImage;
}
return brandImage;
}
/**
* do the text placements and preparatory work for the brand image generation
*
* @param brandImage a BufferedImage object where the image is created
* @param identifier and Identifier object describing what text is to be placed in what
* position within the brand
*/
private void drawImage(BufferedImage brandImage,
BrandText brandText) {
int imgWidth = brandImage.getWidth();
int imgHeight = brandImage.getHeight();
/**
* do the text placements and preparatory work for the brand image generation
*
* @param brandImage a BufferedImage object where the image is created
* @param identifier and Identifier object describing what text is to be placed in what
* position within the brand
*/
private void drawImage(BufferedImage brandImage,
BrandText brandText)
{
int imgWidth = brandImage.getWidth();
int imgHeight = brandImage.getHeight();
Graphics2D g2 = brandImage.createGraphics();
g2.setFont(font);
FontMetrics fm = g2.getFontMetrics();
int bx, by, tx, ty, bWidth, bHeight;
int bWidth = fm.stringWidth(brandText.getText()) + xOffset * 2 + 1;
int bHeight = fm.getHeight();
Graphics2D g2 = brandImage.createGraphics();
g2.setFont(font);
FontMetrics fm = g2.getFontMetrics();
int bx = 0;
int by = 0;
if (brandText.getLocation().equals(BrandText.TL)) {
bx = 0;
by = 0;
} else if (brandText.getLocation().equals(BrandText.TR)) {
bx = imgWidth - bWidth;
by = 0;
} else if (brandText.getLocation().equals(BrandText.BL)) {
bx = 0;
by = imgHeight - bHeight;
} else if (brandText.getLocation().equals(BrandText.BR)) {
bx = imgWidth - bWidth;
by = imgHeight - bHeight;
}
bWidth = fm.stringWidth(brandText.getText()) + xOffset * 2 + 1;
bHeight = fm.getHeight();
Rectangle box = new Rectangle(bx, by, bWidth, bHeight);
int tx = bx + xOffset;
int ty = by + fm.getAscent();
bx = 0;
by = 0;
g2.setColor(Color.black);
g2.fill(box);
g2.setColor(Color.white);
g2.drawString(brandText.getText(), tx, ty);
}
if (brandText.getLocation().equals(BrandText.TL))
{
bx = 0;
by = 0;
}
else if (brandText.getLocation().equals(BrandText.TR))
{
bx = imgWidth - bWidth;
by = 0;
}
else if (brandText.getLocation().equals(BrandText.BL))
{
bx = 0;
by = imgHeight - bHeight;
}
else if (brandText.getLocation().equals(BrandText.BR))
{
bx = imgWidth - bWidth;
by = imgHeight - bHeight;
}
Rectangle box = new Rectangle(bx, by, bWidth, bHeight);
tx = bx + xOffset;
ty = by + fm.getAscent();
g2.setColor(Color.black);
g2.fill(box);
g2.setColor(Color.white);
g2.drawString(brandText.getText(), tx, ty);
}
}

View File

@@ -13,75 +13,73 @@ package org.dspace.app.mediafilter;
* This is a copy of Picture Australia's PiObj class re-organised with methods.
* Thanks to Ninh Nguyen at the National Library for providing the original source.
*/
class BrandText {
/**
* Bottom Left
*/
public static final String BL = "bl";
/**
* Bottom Right
*/
public static final String BR = "br";
/**
* Top Left
*/
public static final String TL = "tl";
/**
* Top Right
*/
public static final String TR = "tr";
class BrandText
{
/** Bottom Left */
public static final String BL = "bl";
/** Bottom Right */
public static final String BR = "br";
/** Top Left */
public static final String TL = "tl";
/** Top Right */
public static final String TR = "tr";
private String location;
private String text;
private String location;
private String text;
/**
* Constructor for an Identifier object containing a text string and
* its location within a rectangular area.
*
* @param location one of the class location constants e.g. <code>Identifier.BL</code>
* @param the text associated with the location
*/
public BrandText(String location, String text) {
this.location = location;
this.text = text;
}
/**
* Constructor for an Identifier object containing a text string and
* its location within a rectangular area.
*
* @param location one of the class location constants e.g. <code>Identifier.BL</code>
* @param the text associated with the location
*/
public BrandText(String location, String text)
{
this.location = location;
this.text = text;
}
/**
* get the location the text of the Identifier object is associated with
*
* @return String one the class location constants e.g. <code>Identifier.BL</code>
*/
public String getLocation() {
return location;
}
/**
* get the location the text of the Identifier object is associated with
*
* @return String one the class location constants e.g. <code>Identifier.BL</code>
*/
public String getLocation()
{
return location;
}
/**
* get the text associated with the Identifier object
*
* @return String the text associated with the Identifier object
*/
public String getText() {
return text;
}
/**
* get the text associated with the Identifier object
*
* @return String the text associated with the Identifier object
*/
public String getText()
{
return text;
}
/**
* set the location associated with the Identifier object
*
* @param location one of the class location constants
*/
public void setLocation(String location) {
this.location = location;
}
/**
* set the location associated with the Identifier object
*
* @param location one of the class location constants
*/
public void setLocation(String location)
{
this.location = location;
}
/**
* set the text associated with the Identifier object
*
* @param text any text string (typically a branding or identifier)
*/
public void setText(String text) {
this.text = text;
}
/**
* set the text associated with the Identifier object
*
* @param text any text string (typically a branding or identifier)
*/
public void setText(String text)
{
this.text = text;
}
}

View File

@@ -7,13 +7,16 @@
*/
package org.dspace.app.mediafilter;
import java.awt.image.BufferedImage;
import java.awt.image.*;
import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
import org.dspace.core.ConfigurationManager;
import org.dspace.app.mediafilter.JPEGFilter;
/**
* Filter image bitstreams, scaling the image to be within the bounds of
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
@@ -21,17 +24,21 @@ import org.dspace.core.ConfigurationManager;
*
* @author Jason Sherman jsherman@usao.edu
*/
public class BrandedPreviewJPEGFilter extends MediaFilter {
public class BrandedPreviewJPEGFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".preview.jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "BRANDED_PREVIEW";
}
@@ -39,7 +46,8 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "JPEG";
}
@@ -47,40 +55,42 @@ public class BrandedPreviewJPEGFilter extends MediaFilter {
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Generated Branded Preview";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source
* source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
// read in bitstream's image
BufferedImage buf = ImageIO.read(source);
// get config params
float xmax = (float) ConfigurationManager
.getIntProperty("webui.preview.maxwidth");
.getIntProperty("webui.preview.maxwidth");
float ymax = (float) ConfigurationManager
.getIntProperty("webui.preview.maxheight");
.getIntProperty("webui.preview.maxheight");
boolean blurring = (boolean) ConfigurationManager
.getBooleanProperty("webui.preview.blurring");
.getBooleanProperty("webui.preview.blurring");
boolean hqscaling = (boolean) ConfigurationManager
.getBooleanProperty("webui.preview.hqscaling");
.getBooleanProperty("webui.preview.hqscaling");
int brandHeight = ConfigurationManager.getIntProperty("webui.preview.brand.height");
String brandFont = ConfigurationManager.getProperty("webui.preview.brand.font");
int brandFontPoint = ConfigurationManager.getIntProperty("webui.preview.brand.fontpoint");
JPEGFilter jpegFilter = new JPEGFilter();
return jpegFilter
.getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint,
brandFont);
return jpegFilter.getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, brandHeight, brandFontPoint, brandFont);
}
}

View File

@@ -11,11 +11,12 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hssf.extractor.ExcelExtractor;
import org.apache.poi.xssf.extractor.XSSFExcelExtractor;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
/*
@@ -34,62 +35,79 @@ import org.dspace.content.Item;
* filter.org.dspace.app.mediafilter.ExcelFilter.inputFormats = Microsoft Excel, Microsoft Excel XML
*
*/
public class ExcelFilter extends MediaFilter {
public class ExcelFilter extends MediaFilter
{
private static Logger log = Logger.getLogger(ExcelFilter.class);
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
/**
* @return String bundle name
*
*/
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
/**
* @return String bitstream format
*
*
*/
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
/**
* @return String description
*/
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
/**
* @param item item
* @param source source input stream
* @param item item
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
String extractedText = null;
try {
try
{
POITextExtractor theExtractor = ExtractorFactory.createExtractor(source);
if (theExtractor instanceof ExcelExtractor) {
if (theExtractor instanceof ExcelExtractor)
{
// for xls file
extractedText = (theExtractor).getText();
} else if (theExtractor instanceof XSSFExcelExtractor) {
}
else if (theExtractor instanceof XSSFExcelExtractor)
{
// for xlsx file
extractedText = (theExtractor).getText();
}
} catch (Exception e) {
}
catch (Exception e)
{
log.error("Error filtering bitstream: " + e.getMessage(), e);
throw e;
}
if (extractedText != null) {
if (extractedText != null)
{
// generate an input stream with the extracted text
return IOUtils.toInputStream(extractedText, StandardCharsets.UTF_8);
}

View File

@@ -18,46 +18,50 @@ import org.dspace.core.Context;
* from one format to another. This interface should be implemented by any class
* which defines a "filter" to be run by the MediaFilterManager.
*/
public interface FormatFilter {
public interface FormatFilter
{
/**
* Get a filename for a newly created filtered bitstream
*
* @param sourceName name of source bitstream
* @param sourceName
* name of source bitstream
* @return filename generated by the filter - for example, document.pdf
* becomes document.pdf.txt
* becomes document.pdf.txt
*/
public String getFilteredName(String sourceName);
/**
* @return name of the bundle this filter will stick its generated
* Bitstreams
* Bitstreams
*/
public String getBundleName();
/**
* @return name of the bitstream format (say "HTML" or "Microsoft Word")
* returned by this filter look in the bitstream format registry or
* mediafilter.cfg for valid format strings.
* returned by this filter look in the bitstream format registry or
* mediafilter.cfg for valid format strings.
*/
public String getFormatString();
/**
* @return string to describe the newly-generated Bitstream - how it was
* produced is a good idea
* produced is a good idea
*/
public String getDescription();
/**
* Read the source stream and produce the filtered content.
*
* @param item Item
* @param source input stream
* @param item Item
* @param source
* input stream
* @param verbose verbosity flag
*
* @return result of filter's transformation as a byte stream.
* @throws Exception if error
*/
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)
throws Exception;
throws Exception;
/**
* Perform any pre-processing of the source bitstream *before* the actual
@@ -67,16 +71,18 @@ public interface FormatFilter {
* is necessary). Return false if bitstream should be skipped
* for any reason.
*
* @param c context
* @param item item containing bitstream to process
* @param source source bitstream to be processed
*
* @param c context
* @param item item containing bitstream to process
* @param source source bitstream to be processed
* @param verbose verbose mode
*
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
* false if this bitstream should be skipped
* @throws Exception if error
*/
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception;
throws Exception;
/**
* Perform any post-processing of the generated bitstream *after* this
@@ -86,13 +92,17 @@ public interface FormatFilter {
* is necessary). Return false if bitstream should be skipped
* for some reason.
*
* @param c context
* @param item item containing bitstream to process
* @param generatedBitstream the bitstream which was generated by
* this filter.
*
* @param c
* context
* @param item
* item containing bitstream to process
* @param generatedBitstream
* the bitstream which was generated by
* this filter.
* @throws Exception if error
*/
public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
throws Exception;
throws Exception;
}

View File

@@ -7,31 +7,36 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import javax.swing.text.Document;
import javax.swing.text.html.HTMLEditorKit;
import org.dspace.content.Item;
/*
*
* to do: helpful error messages - can't find mediafilter.cfg - can't
* instantiate filter - bitstream format doesn't exist
*
*/
public class HTMLFilter extends MediaFilter {
public class HTMLFilter extends MediaFilter
{
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
@@ -39,7 +44,8 @@ public class HTMLFilter extends MediaFilter {
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
@@ -47,20 +53,23 @@ public class HTMLFilter extends MediaFilter {
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
// try and read the document - set to ignore character set directive,
// assuming that the input stream is already set properly (I hope)
HTMLEditorKit kit = new HTMLEditorKit();

View File

@@ -7,46 +7,53 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import org.dspace.content.Item;
/**
* Filter image bitstreams, scaling the image to be within the bounds of
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs.
*/
public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter {
public class ImageMagickImageThumbnailFilter extends ImageMagickThumbnailFilter
{
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = null;
try {
f2 = getThumbnailFile(f, verbose);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
} finally {
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null) {
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
}
throws Exception
{
File f = inputStreamToTempFile(source, "imthumb", ".tmp");
File f2 = null;
try
{
f2 = getThumbnailFile(f, verbose);
byte[] bytes = Files.readAllBytes(f2.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
}
}
}

View File

@@ -7,37 +7,43 @@
*/
package org.dspace.app.mediafilter;
import org.dspace.content.Item;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.InputStream;
import java.nio.file.Files;
import org.dspace.content.Item;
public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = null;
File f3 = null;
try {
f2 = getImageFile(f, 0, verbose);
f3 = getThumbnailFile(f2, verbose);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
} finally {
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null) {
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null) {
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception
{
File f = inputStreamToTempFile(source, "impdfthumb", ".pdf");
File f2 = null;
File f3 = null;
try
{
f2 = getImageFile(f, 0, verbose);
f3 = getThumbnailFile(f2, verbose);
byte[] bytes = Files.readAllBytes(f3.toPath());
return new ByteArrayInputStream(bytes);
}
finally
{
//noinspection ResultOfMethodCallIgnored
f.delete();
if (f2 != null)
{
//noinspection ResultOfMethodCallIgnored
f2.delete();
}
if (f3 != null)
{
//noinspection ResultOfMethodCallIgnored
f3.delete();
}
}
}
}

View File

@@ -14,189 +14,191 @@ import java.io.InputStream;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import javax.imageio.ImageIO;
import org.dspace.content.Bitstream;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.Info;
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.core.Info;
import org.im4java.process.ProcessStarter;
import org.dspace.core.ConfigurationManager;
/**
* Filter image bitstreams, scaling the image to be within the bounds of
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs.
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static String cmyk_profile;
static String srgb_profile;
static String cmyk_profile;
static String srgb_profile;
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*/
@Override
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription() {
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.autoOrient();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
op.profile(srgb_profile);
}
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: " + op);
}
cmd.run(op);
return f2;
}
// NOTE(review): this copy of preProcessBitstream appears to have been garbled
// during transcription/merge: a class-level "static { ... }" initializer and
// the replaceRegex compilation have been spliced into the middle of the method
// body. A static initializer is not legal inside a method, so this block
// cannot compile as written. An intact duplicate of this method appears later
// in the file; reconcile against that copy before trusting this logic.
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
String nsrc = source.getName();
// Inspect every bitstream already present in the item's THUMBNAIL bundle(s).
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
// Skip thumbnails that were not derived from this source bitstream.
if (!n.startsWith(nsrc)) {
continue;
}
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
// NOTE(review): the following static block belongs at class level, not here.
static {
String pre = ImageMagickThumbnailFilter.class.getName();
// Configure im4java's global search path for the ImageMagick binaries.
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
// A description matching the replacement pattern marks a generated
// thumbnail, which is safe to overwrite.
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
}
continue;
}
// A description equal to this filter's own marks our earlier output.
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
}
continue;
}
bitstreamDescription = description;
}
try {
// Compile the configured replacement pattern, falling back to the default.
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
// Any other description means a hand-uploaded (custom) thumbnail exists.
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; // assume that the thumbnail is a custom one
}
/** Default constructor; no per-instance state to initialize. */
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename) {
    // Keep the original name (for traceability) and mark the JPEG output.
    return oldFilename + ".jpg";
}
/**
 * Name the bundle that receives this filter's output.
 *
 * @return the target bundle name, always "THUMBNAIL"
 */
@Override
public String getBundleName() {
    return "THUMBNAIL";
}
/**
 * Report the bitstream format produced by this filter.
 *
 * @return the output format name, always "JPEG"
 */
@Override
public String getFormatString() {
    return "JPEG";
}
/**
 * Describe the generated bitstream.
 *
 * @return the description applied to thumbnails this filter creates
 */
@Override
public String getDescription() {
    return bitstreamDescription;
}
/**
 * Copy the contents of an InputStream to a temporary file on disk.
 * <p>
 * The temporary file is registered for deletion on JVM exit. The source
 * stream is NOT closed here; that remains the caller's responsibility
 * (matching the original behavior).
 *
 * @param source stream to copy
 * @param prefix temp-file name prefix
 * @param suffix temp-file name suffix (e.g. ".pdf")
 * @return the temporary file containing the stream's bytes
 * @throws IOException if the file cannot be created or written
 */
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
    File f = File.createTempFile(prefix, suffix);
    f.deleteOnExit();
    // try-with-resources guarantees the output stream is closed even when a
    // read or write throws; the original leaked the FileOutputStream on error.
    try (FileOutputStream fos = new FileOutputStream(f)) {
        byte[] buffer = new byte[4096];
        int len;
        while ((len = source.read(buffer)) != -1) {
            fos.write(buffer, 0, len);
        }
    }
    return f;
}
/**
 * Produce a JPEG thumbnail of an image file by invoking ImageMagick's
 * thumbnail operation via im4java.
 *
 * @param f       source image file
 * @param verbose when true, echo the ImageMagick parameters to stdout
 * @return the thumbnail file, created alongside the source file
 * @throws IOException          on file-system errors
 * @throws InterruptedException if the external convert process is interrupted
 * @throws IM4JavaException     on ImageMagick failures
 */
public File getThumbnailFile(File f, boolean verbose)
    throws IOException, InterruptedException, IM4JavaException {
    // The thumbnail sits next to the source file, with a ".jpg" suffix.
    File thumb = new File(f.getParentFile(), f.getName() + ".jpg");
    thumb.deleteOnExit();
    ConvertCmd convert = new ConvertCmd();
    IMOperation op = new IMOperation();
    op.addImage(f.getAbsolutePath());
    op.thumbnail(width, height);
    op.addImage(thumb.getAbsolutePath());
    if (verbose) {
        System.out.println("IM Thumbnail Param: " + op);
    }
    convert.run(op);
    return thumb;
}
/**
 * Render one page of a (possibly multi-page) source file to a JPEG via
 * ImageMagick, with optional flattening and CMYK-to-sRGB conversion.
 *
 * @param f       source file (e.g. a PDF)
 * @param page    zero-based page/frame index to render
 * @param verbose when true, echo the ImageMagick parameters to stdout
 * @return the rendered JPEG, created alongside the source file
 * @throws IOException          on file-system errors
 * @throws InterruptedException if the external convert process is interrupted
 * @throws IM4JavaException     on ImageMagick failures
 */
public File getImageFile(File f, int page, boolean verbose)
    throws IOException, InterruptedException, IM4JavaException {
    File jpeg = new File(f.getParentFile(), f.getName() + ".jpg");
    jpeg.deleteOnExit();
    ConvertCmd convert = new ConvertCmd();
    IMOperation op = new IMOperation();
    // ImageMagick's "[n]" syntax selects a single page/frame of the input.
    op.addImage(f.getAbsolutePath() + "[" + page + "]");
    if (flatten) {
        op.flatten();
    }
    // PDFs using the CMYK color system can be handled specially if
    // both ICC profiles are configured.
    if (cmyk_profile != null && srgb_profile != null) {
        Info imageInfo = new Info(f.getAbsolutePath(), true);
        String imageClass = imageInfo.getImageClass();
        if (imageClass.contains("CMYK")) {
            op.profile(cmyk_profile);
            op.profile(srgb_profile);
        }
    }
    op.addImage(jpeg.getAbsolutePath());
    if (verbose) {
        System.out.println("IM Image Param: " + op);
    }
    convert.run(op);
    return jpeg;
}
/**
 * Decide whether a thumbnail should be generated for the given source
 * bitstream by inspecting what is already in the THUMBNAIL bundle(s).
 * Existing thumbnails whose description matches the replacement pattern, or
 * equals this filter's own description, are treated as replaceable; any
 * other match is assumed to be a hand-uploaded custom thumbnail and aborts
 * generation.
 *
 * @param c       DSpace context
 * @param item    item whose bundles are inspected
 * @param source  bitstream a thumbnail would be generated for
 * @param verbose when true, report replaceability decisions to stdout
 * @return true to proceed with thumbnail generation, false to skip
 * @throws Exception on service-layer errors
 */
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
    String sourceName = source.getName();
    for (Bundle bundle : itemService.getBundles(item, "THUMBNAIL")) {
        for (Bitstream candidate : bundle.getBitstreams()) {
            String candidateName = candidate.getName();
            // Only consider thumbnails derived from this source bitstream
            // (name-prefix match, when both names are known).
            if (candidateName != null && sourceName != null
                    && !candidateName.startsWith(sourceName)) {
                continue;
            }
            String description = candidate.getDescription();
            // Anything other than a generated thumbnail halts processing.
            if (description != null) {
                if (replaceRegex.matcher(description).matches()) {
                    if (verbose) {
                        System.out.println(description + " " + sourceName
                            + " matches pattern and is replacable.");
                    }
                    continue;
                }
                if (description.equals(bitstreamDescription)) {
                    if (verbose) {
                        System.out.println(bitstreamDescription + " " + sourceName
                            + " is replacable.");
                    }
                    continue;
                }
            }
            System.out.println("Custom Thumbnail exists for " + sourceName + " for item "
                + item.getHandle() + ". Thumbnail will not be generated. ");
            return false;
        }
    }
    // No custom thumbnail found; safe to (re)generate.
    return true;
}
}

View File

@@ -7,18 +7,16 @@
*/
package org.dspace.app.mediafilter;
import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Color;
import java.awt.image.*;
import java.awt.RenderingHints;
import java.awt.Transparency;
import java.awt.image.BufferedImage;
import java.awt.image.BufferedImageOp;
import java.awt.image.ConvolveOp;
import java.awt.image.Kernel;
import java.awt.Font;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import javax.imageio.ImageIO;
import org.dspace.content.Item;
@@ -31,17 +29,21 @@ import org.dspace.core.ConfigurationManager;
*
* @author Jason Sherman jsherman@usao.edu
*/
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats {
public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
{
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "THUMBNAIL";
}
@@ -49,7 +51,8 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "JPEG";
}
@@ -57,20 +60,23 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Generated Thumbnail";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
// read in bitstream's image
BufferedImage buf = ImageIO.read(source);
@@ -78,42 +84,45 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
public InputStream getThumb(Item currentItem, BufferedImage buf, boolean verbose)
throws Exception {
throws Exception
{
// get config params
float xmax = (float) ConfigurationManager
.getIntProperty("thumbnail.maxwidth");
.getIntProperty("thumbnail.maxwidth");
float ymax = (float) ConfigurationManager
.getIntProperty("thumbnail.maxheight");
.getIntProperty("thumbnail.maxheight");
boolean blurring = (boolean) ConfigurationManager
.getBooleanProperty("thumbnail.blurring");
.getBooleanProperty("thumbnail.blurring");
boolean hqscaling = (boolean) ConfigurationManager
.getBooleanProperty("thumbnail.hqscaling");
.getBooleanProperty("thumbnail.hqscaling");
return getThumbDim(currentItem, buf, verbose, xmax, ymax, blurring, hqscaling, 0, 0, null);
}
public InputStream getThumbDim(Item currentItem, BufferedImage buf, boolean verbose, float xmax, float ymax,
boolean blurring, boolean hqscaling, int brandHeight, int brandFontPoint,
String brandFont)
throws Exception {
public InputStream getThumbDim(Item currentItem, BufferedImage buf, boolean verbose, float xmax, float ymax, boolean blurring, boolean hqscaling, int brandHeight, int brandFontPoint, String brandFont)
throws Exception
{
// now get the image dimensions
float xsize = (float) buf.getWidth(null);
float ysize = (float) buf.getHeight(null);
// if verbose flag is set, print out dimensions
// to STDOUT
if (verbose) {
if (verbose)
{
System.out.println("original size: " + xsize + "," + ysize);
}
// scale by x first if needed
if (xsize > xmax) {
if (xsize > xmax)
{
// calculate scaling factor so that xsize * scale = new size (max)
float scale_factor = xmax / xsize;
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
if (verbose)
{
System.out.println("x scale factor: " + scale_factor);
}
@@ -124,13 +133,15 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
if (verbose)
{
System.out.println("size after fitting to maximum width: " + xsize + "," + ysize);
}
}
// scale by y if needed
if (ysize > ymax) {
if (ysize > ymax)
{
float scale_factor = ymax / ysize;
// now reduce x size
@@ -140,28 +151,31 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
}
// if verbose flag is set, print details to STDOUT
if (verbose) {
if (verbose)
{
System.out.println("size after fitting to maximum height: " + xsize + ", "
+ ysize);
+ ysize);
}
// create an image buffer for the thumbnail with the new xsize, ysize
BufferedImage thumbnail = new BufferedImage((int) xsize, (int) ysize,
BufferedImage.TYPE_INT_RGB);
BufferedImage.TYPE_INT_RGB);
// Use blurring if selected in config.
// a little blur before scaling does wonders for keeping moire in check.
if (blurring) {
// send the buffered image off to get blurred.
buf = getBlurredInstance((BufferedImage) buf);
if (blurring)
{
// send the buffered image off to get blurred.
buf = getBlurredInstance((BufferedImage) buf);
}
// Use high quality scaling method if selected in config.
// this has a definite performance penalty.
if (hqscaling) {
// send the buffered image off to get an HQ downscale.
buf = getScaledInstance((BufferedImage) buf, (int) xsize, (int) ysize,
(Object) RenderingHints.VALUE_INTERPOLATION_BICUBIC, (boolean) true);
if (hqscaling)
{
// send the buffered image off to get an HQ downscale.
buf = getScaledInstance((BufferedImage) buf, (int) xsize, (int) ysize,
(Object) RenderingHints.VALUE_INTERPOLATION_BICUBIC, (boolean) true);
}
// now render the image into the thumbnail buffer
@@ -174,7 +188,7 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
ConfigurationManager.getProperty("webui.preview.brand.abbrev"),
currentItem == null ? "" : "hdl:" + currentItem.getHandle());
g2d.drawImage(brandImage, (int) 0, (int) ysize, (int) xsize, (int) 20, null);
g2d.drawImage(brandImage, (int)0, (int)ysize, (int) xsize, (int) 20, null);
}
// now create an input stream for the thumbnail buffer and return it
@@ -190,32 +204,37 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
@Override
public String[] getInputMIMETypes() {
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions() {
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions() {
public String[] getInputExtensions()
{
// Temporarily disabled as JDK 1.6 only
// return ImageIO.getReaderFileSuffixes();
return null;
}
public BufferedImage getNormalizedInstance(BufferedImage buf) {
int type = (buf.getTransparency() == Transparency.OPAQUE) ?
public BufferedImage getNormalizedInstance(BufferedImage buf)
{
int type = (buf.getTransparency() == Transparency.OPAQUE) ?
BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB_PRE;
int w = buf.getWidth();
int h = buf.getHeight();
BufferedImage normal = new BufferedImage(w, h, type);
Graphics2D g2d = normal.createGraphics();
g2d.drawImage(buf, 0, 0, w, h, Color.WHITE, null);
g2d.dispose();
return normal;
int w, h;
w = buf.getWidth();
h = buf.getHeight();
BufferedImage normal = new BufferedImage(w, h, type);
Graphics2D g2d = normal.createGraphics();
g2d.drawImage(buf, 0, 0, w, h, Color.WHITE, null);
g2d.dispose();
return normal;
}
/**
@@ -225,54 +244,55 @@ public class JPEGFilter extends MediaFilter implements SelfRegisterInputFormats
* @param buf buffered image
* @return updated BufferedImage
*/
public BufferedImage getBlurredInstance(BufferedImage buf) {
buf = getNormalizedInstance(buf);
public BufferedImage getBlurredInstance(BufferedImage buf)
{
buf = getNormalizedInstance(buf);
// kernel for blur op
float[] matrix = {
0.111f, 0.111f, 0.111f,
0.111f, 0.111f, 0.111f,
0.111f, 0.111f, 0.111f,
};
// kernel for blur op
float[] matrix = {
0.111f, 0.111f, 0.111f,
0.111f, 0.111f, 0.111f,
0.111f, 0.111f, 0.111f,
};
// perform the blur and return the blurred version.
BufferedImageOp blur = new ConvolveOp(new Kernel(3, 3, matrix));
BufferedImage blurbuf = blur.filter(buf, null);
return blurbuf;
// perform the blur and return the blurred version.
BufferedImageOp blur = new ConvolveOp( new Kernel(3, 3, matrix) );
BufferedImage blurbuf = blur.filter(buf, null);
return blurbuf;
}
/**
* Convenience method that returns a scaled instance of the
* provided {@code BufferedImage}.
*
* @param buf the original image to be scaled
* @param targetWidth the desired width of the scaled instance,
* in pixels
* @param targetHeight the desired height of the scaled instance,
* in pixels
* @param hint one of the rendering hints that corresponds to
* {@code RenderingHints.KEY_INTERPOLATION} (e.g.
* {@code RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR},
* {@code RenderingHints.VALUE_INTERPOLATION_BILINEAR},
* {@code RenderingHints.VALUE_INTERPOLATION_BICUBIC})
* @param buf the original image to be scaled
* @param targetWidth the desired width of the scaled instance,
* in pixels
* @param targetHeight the desired height of the scaled instance,
* in pixels
* @param hint one of the rendering hints that corresponds to
* {@code RenderingHints.KEY_INTERPOLATION} (e.g.
* {@code RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR},
* {@code RenderingHints.VALUE_INTERPOLATION_BILINEAR},
* {@code RenderingHints.VALUE_INTERPOLATION_BICUBIC})
* @param higherQuality if true, this method will use a multi-step
* scaling technique that provides higher quality than the usual
* one-step technique (only useful in downscaling cases, where
* {@code targetWidth} or {@code targetHeight} is
* smaller than the original dimensions, and generally only when
* the {@code BILINEAR} hint is specified)
* scaling technique that provides higher quality than the usual
* one-step technique (only useful in downscaling cases, where
* {@code targetWidth} or {@code targetHeight} is
* smaller than the original dimensions, and generally only when
* the {@code BILINEAR} hint is specified)
* @return a scaled version of the original {@code BufferedImage}
*/
public BufferedImage getScaledInstance(BufferedImage buf,
int targetWidth,
int targetHeight,
Object hint,
boolean higherQuality) {
boolean higherQuality)
{
int type = (buf.getTransparency() == Transparency.OPAQUE) ?
BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB;
BufferedImage scalebuf = (BufferedImage) buf;
int w;
int h;
BufferedImage scalebuf = (BufferedImage)buf;
int w, h;
if (higherQuality) {
// Use multi-step technique: start with original size, then
// scale down in multiple passes with drawImage()

View File

@@ -18,8 +18,9 @@ import org.dspace.core.Context;
* by the MediaFilterManager. More complex filters should likely implement the FormatFilter
* interface directly, so that they can define their own pre/postProcessing methods.
*/
public abstract class MediaFilter implements FormatFilter {
/**
public abstract class MediaFilter implements FormatFilter
{
/**
* Perform any pre-processing of the source bitstream *before* the actual
* filtering takes place in MediaFilterManager.processBitstream().
* <p>
@@ -27,17 +28,20 @@ public abstract class MediaFilter implements FormatFilter {
* is necessary). Return false if bitstream should be skipped
* for any reason.
*
* @param c context
* @param item item containing bitstream to process
* @param source source bitstream to be processed
*
* @param c context
* @param item item containing bitstream to process
* @param source source bitstream to be processed
* @param verbose verbose mode
*
* @return true if bitstream processing should continue,
* false if this bitstream should be skipped
* false if this bitstream should be skipped
* @throws Exception if error
*/
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception {
throws Exception
{
return true; //default to no pre-processing
}
@@ -49,15 +53,20 @@ public abstract class MediaFilter implements FormatFilter {
* is necessary). Return false if bitstream should be skipped
* for some reason.
*
* @param c context
* @param item item containing bitstream to process
* @param generatedBitstream the bitstream which was generated by
* this filter.
* @throws Exception if error
*
* @param c
* context
* @param item
* item containing bitstream to process
* @param generatedBitstream
* the bitstream which was generated by
* this filter.
* @throws java.lang.Exception
*/
@Override
public void postProcessBitstream(Context c, Item item, Bitstream generatedBitstream)
throws Exception {
throws Exception
{
//default to no post-processing necessary
}
}

View File

@@ -7,22 +7,7 @@
*/
package org.dspace.app.mediafilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.cli.*;
import org.dspace.app.mediafilter.factory.MediaFilterServiceFactory;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.content.Collection;
@@ -34,6 +19,9 @@ import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
import org.dspace.core.factory.CoreServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import java.util.*;
import org.apache.commons.lang.ArrayUtils;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
@@ -56,12 +44,8 @@ public class MediaFilterCLITool {
//suffix (in dspace.cfg) for input formats supported by each filter
private static final String INPUT_FORMATS_SUFFIX = "inputFormats";
/**
* Default constructor
*/
private MediaFilterCLITool() { }
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
// set headless for non-gui workstations
System.setProperty("java.awt.headless", "true");
@@ -73,25 +57,25 @@ public class MediaFilterCLITool {
Options options = new Options();
options.addOption("v", "verbose", false,
"print all extracted text and other details to STDOUT");
"print all extracted text and other details to STDOUT");
options.addOption("q", "quiet", false,
"do not print anything except in the event of errors.");
"do not print anything except in the event of errors.");
options.addOption("f", "force", false,
"force all bitstreams to be processed");
"force all bitstreams to be processed");
options.addOption("i", "identifier", true,
"ONLY process bitstreams belonging to identifier");
"ONLY process bitstreams belonging to identifier");
options.addOption("m", "maximum", true,
"process no more than maximum items");
"process no more than maximum items");
options.addOption("h", "help", false, "help");
//create a "plugin" option (to specify specific MediaFilter plugins to run)
OptionBuilder.withLongOpt("plugins");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
"ONLY run the specified Media Filter plugin(s)\n" +
"listed from '" + MEDIA_FILTER_PLUGINS_KEY + "' in dspace.cfg.\n" +
"Separate multiple with a comma (,)\n" +
"(e.g. MediaFilterManager -p \n\"Word Text Extractor\",\"PDF Text Extractor\")");
Option pluginOption = OptionBuilder.create('p');
pluginOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(pluginOption);
@@ -100,9 +84,9 @@ public class MediaFilterCLITool {
OptionBuilder.withLongOpt("skip");
OptionBuilder.withValueSeparator(',');
OptionBuilder.withDescription(
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
"SKIP the bitstreams belonging to identifier\n" +
"Separate multiple identifiers with a comma (,)\n" +
"(e.g. MediaFilterManager -s \n 123456789/34,123456789/323)");
Option skipOption = OptionBuilder.create('s');
skipOption.setArgs(Option.UNLIMITED_VALUES); //unlimited number of args
options.addOption(skipOption);
@@ -115,61 +99,73 @@ public class MediaFilterCLITool {
Map<String, List<String>> filterFormats = new HashMap<>();
CommandLine line = null;
try {
try
{
line = parser.parse(options, argv);
} catch (MissingArgumentException e) {
}
catch(MissingArgumentException e)
{
System.out.println("ERROR: " + e.getMessage());
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
}
if (line.hasOption('v')) {
if (line.hasOption('v'))
{
isVerbose = true;
}
isQuiet = line.hasOption('q');
if (line.hasOption('f')) {
if (line.hasOption('f'))
{
isForce = true;
}
if (line.hasOption('i')) {
if (line.hasOption('i'))
{
identifier = line.getOptionValue('i');
}
if (line.hasOption('m')) {
if (line.hasOption('m'))
{
max2Process = Integer.parseInt(line.getOptionValue('m'));
if (max2Process <= 1) {
if (max2Process <= 1)
{
System.out.println("Invalid maximum value '" +
line.getOptionValue('m') + "' - ignoring");
line.getOptionValue('m') + "' - ignoring");
max2Process = Integer.MAX_VALUE;
}
}
String filterNames[] = null;
if (line.hasOption('p')) {
if(line.hasOption('p'))
{
//specified which media filter plugins we are using
filterNames = line.getOptionValues('p');
if (filterNames == null || filterNames.length == 0) { //display error, since no plugins specified
if(filterNames==null || filterNames.length==0)
{ //display error, since no plugins specified
System.err.println("\nERROR: -p (-plugin) option requires at least one plugin to be specified.\n" +
"(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
"(e.g. MediaFilterManager -p \"Word Text Extractor\",\"PDF Text Extractor\")\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(1);
}
} else {
}
else
{
//retrieve list of all enabled media filter plugins!
filterNames = DSpaceServicesFactory.getInstance().getConfigurationService()
.getArrayProperty(MEDIA_FILTER_PLUGINS_KEY);
filterNames = DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(MEDIA_FILTER_PLUGINS_KEY);
}
MediaFilterService mediaFilterService = MediaFilterServiceFactory.getInstance().getMediaFilterService();
@@ -182,16 +178,17 @@ public class MediaFilterCLITool {
List<FormatFilter> filterList = new ArrayList<FormatFilter>();
//set up each filter
for (int i = 0; i < filterNames.length; i++) {
for(int i=0; i< filterNames.length; i++)
{
//get filter of this name & add to list of filters
FormatFilter filter = (FormatFilter) CoreServiceFactory.getInstance().getPluginService()
.getNamedPlugin(FormatFilter.class, filterNames[i]);
if (filter == null) {
System.err.println(
"\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" +
filterNames[i] + "'");
FormatFilter filter = (FormatFilter) CoreServiceFactory.getInstance().getPluginService().getNamedPlugin(FormatFilter.class, filterNames[i]);
if(filter==null)
{
System.err.println("\nERROR: Unknown MediaFilter specified (either from command-line or in dspace.cfg): '" + filterNames[i] + "'");
System.exit(1);
} else {
}
else
{
filterList.add(filter);
String filterClassName = filter.getClass().getName();
@@ -203,7 +200,8 @@ public class MediaFilterCLITool {
//each "named" plugin that it defines.
//So, we have to look for every key that fits the
//following format: filter.<class-name>.<plugin-name>.inputFormats
if (SelfNamedPlugin.class.isAssignableFrom(filter.getClass())) {
if( SelfNamedPlugin.class.isAssignableFrom(filter.getClass()) )
{
//Get the plugin instance name for this class
pluginName = ((SelfNamedPlugin) filter).getPluginInstanceName();
}
@@ -215,41 +213,44 @@ public class MediaFilterCLITool {
//For other MediaFilters, format of key is:
// filter.<class-name>.inputFormats
String[] formats =
DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(
DSpaceServicesFactory.getInstance().getConfigurationService().getArrayProperty(
FILTER_PREFIX + "." + filterClassName +
(pluginName != null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
(pluginName!=null ? "." + pluginName : "") +
"." + INPUT_FORMATS_SUFFIX);
//add to internal map of filters to supported formats
if (ArrayUtils.isNotEmpty(formats)) {
if (ArrayUtils.isNotEmpty(formats))
{
//For SelfNamedPlugins, map key is:
// <class-name><separator><plugin-name>
//For other MediaFilters, map key is just:
// <class-name>
filterFormats.put(filterClassName +
(pluginName != null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR +
pluginName : ""),
Arrays.asList(formats));
(pluginName!=null ? MediaFilterService.FILTER_PLUGIN_SEPARATOR + pluginName : ""),
Arrays.asList(formats));
}
} //end if filter!=null
} //end for
}//end if filter!=null
}//end for
//If verbose, print out loaded mediafilter info
if (isVerbose) {
if(isVerbose)
{
System.out.println("The following MediaFilters are enabled: ");
Iterator<String> i = filterFormats.keySet().iterator();
while (i.hasNext()) {
while(i.hasNext())
{
String filterName = i.next();
System.out.println("Full Filter Name: " + filterName);
String pluginName = null;
if (filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR)) {
if(filterName.contains(MediaFilterService.FILTER_PLUGIN_SEPARATOR))
{
String[] fields = filterName.split(MediaFilterService.FILTER_PLUGIN_SEPARATOR);
filterName = fields[0];
pluginName = fields[1];
filterName=fields[0];
pluginName=fields[1];
}
System.out.println(filterName +
(pluginName != null ? " (Plugin: " + pluginName + ")" : ""));
(pluginName!=null? " (Plugin: " + pluginName + ")": ""));
}
}
@@ -260,14 +261,16 @@ public class MediaFilterCLITool {
//Retrieve list of identifiers to skip (if any)
String skipIds[] = null;
if (line.hasOption('s')) {
if(line.hasOption('s'))
{
//specified which identifiers to skip when processing
skipIds = line.getOptionValues('s');
if (skipIds == null || skipIds.length == 0) { //display error, since no identifiers specified to skip
if(skipIds==null || skipIds.length==0)
{ //display error, since no identifiers specified to skip
System.err.println("\nERROR: -s (-skip) option requires at least one identifier to SKIP.\n" +
"Make sure to separate multiple identifiers with a comma!\n" +
"(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
"Make sure to separate multiple identifiers with a comma!\n" +
"(e.g. MediaFilterManager -s 123456789/34,123456789/323)\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("MediaFilterManager\n", options);
System.exit(0);
@@ -279,24 +282,29 @@ public class MediaFilterCLITool {
Context c = null;
try {
try
{
c = new Context();
// have to be super-user to do the filtering
c.turnOffAuthorisationSystem();
// now apply the filters
if (identifier == null) {
if (identifier == null)
{
mediaFilterService.applyFiltersAllItems(c);
} else {
// restrict application scope to identifier
}
else // restrict application scope to identifier
{
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(c, identifier);
if (dso == null) {
if (dso == null)
{
throw new IllegalArgumentException("Cannot resolve "
+ identifier + " to a DSpace object");
+ identifier + " to a DSpace object");
}
switch (dso.getType()) {
switch (dso.getType())
{
case Constants.COMMUNITY:
mediaFilterService.applyFiltersCommunity(c, (Community) dso);
break;
@@ -306,17 +314,20 @@ public class MediaFilterCLITool {
case Constants.ITEM:
mediaFilterService.applyFiltersItem(c, (Item) dso);
break;
default:
break;
}
}
c.complete();
c = null;
} catch (Exception e) {
}
catch (Exception e)
{
status = 1;
} finally {
if (c != null) {
}
finally
{
if (c != null)
{
c.abort();
}
}

View File

@@ -7,28 +7,11 @@
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DCDate;
import org.dspace.content.Item;
import org.dspace.content.service.BitstreamFormatService;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.BundleService;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.content.service.*;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.SelfNamedPlugin;
@@ -38,6 +21,9 @@ import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.InputStream;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
@@ -47,7 +33,8 @@ import org.springframework.beans.factory.annotation.Autowired;
* scope to a community, collection or item; and -m [max] limits processing to a
* maximum number of items.
*/
public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean {
public class MediaFilterServiceImpl implements MediaFilterService, InitializingBean
{
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
@@ -85,25 +72,27 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
protected boolean isQuiet = false;
protected boolean isForce = false; // default to not forced
protected MediaFilterServiceImpl() {
protected MediaFilterServiceImpl()
{
}
@Override
public void afterPropertiesSet() throws Exception {
String[] publicPermissionFilters = configurationService
.getArrayProperty("filter.org.dspace.app.mediafilter.publicPermission");
String[] publicPermissionFilters = configurationService.getArrayProperty("filter.org.dspace.app.mediafilter.publicPermission");
if (publicPermissionFilters != null) {
for (String filter : publicPermissionFilters) {
if(publicPermissionFilters != null) {
for(String filter : publicPermissionFilters) {
publicFiltersClasses.add(filter.trim());
}
}
}
@Override
public void applyFiltersAllItems(Context context) throws Exception {
if (skipList != null) {
public void applyFiltersAllItems(Context context) throws Exception
{
if(skipList!=null)
{
//if a skip-list exists, we need to filter community-by-community
//so we can respect what is in the skip-list
List<Community> topLevelCommunities = communityService.findAllTop(context);
@@ -111,10 +100,13 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
for (Community topLevelCommunity : topLevelCommunities) {
applyFiltersCommunity(context, topLevelCommunity);
}
} else {
}
else
{
//otherwise, just find every item and process
Iterator<Item> itemIterator = itemService.findAll(context);
while (itemIterator.hasNext() && processed < max2Process) {
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
@@ -122,14 +114,16 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
@Override
public void applyFiltersCommunity(Context context, Community community)
throws Exception { //only apply filters if community not in skip-list
if (!inSkipList(community.getHandle())) {
List<Community> subcommunities = community.getSubcommunities();
throws Exception
{ //only apply filters if community not in skip-list
if(!inSkipList(community.getHandle()))
{
List<Community> subcommunities = community.getSubcommunities();
for (Community subcommunity : subcommunities) {
applyFiltersCommunity(context, subcommunity);
}
List<Collection> collections = community.getCollections();
List<Collection> collections = community.getCollections();
for (Collection collection : collections) {
applyFiltersCollection(context, collection);
}
@@ -138,36 +132,43 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
@Override
public void applyFiltersCollection(Context context, Collection collection)
throws Exception {
throws Exception
{
//only apply filters if collection not in skip-list
if (!inSkipList(collection.getHandle())) {
if(!inSkipList(collection.getHandle()))
{
Iterator<Item> itemIterator = itemService.findAllByCollection(context, collection);
while (itemIterator.hasNext() && processed < max2Process) {
while (itemIterator.hasNext() && processed < max2Process)
{
applyFiltersItem(context, itemIterator.next());
}
}
}
@Override
public void applyFiltersItem(Context c, Item item) throws Exception {
public void applyFiltersItem(Context c, Item item) throws Exception
{
//only apply filters if item not in skip-list
if (!inSkipList(item.getHandle())) {
//cache this item in MediaFilterManager
//so it can be accessed by MediaFilters as necessary
currentItem = item;
if(!inSkipList(item.getHandle()))
{
//cache this item in MediaFilterManager
//so it can be accessed by MediaFilters as necessary
currentItem = item;
if (filterItem(c, item)) {
// increment processed count
++processed;
}
// clear item objects from context cache and internal cache
c.uncacheEntity(currentItem);
currentItem = null;
if (filterItem(c, item))
{
// increment processed count
++processed;
}
// clear item objects from context cache and internal cache
c.uncacheEntity(currentItem);
currentItem = null;
}
}
@Override
public boolean filterItem(Context context, Item myItem) throws Exception {
public boolean filterItem(Context context, Item myItem) throws Exception
{
// get 'original' bundles
List<Bundle> myBundles = itemService.getBundles(myItem, "ORIGINAL");
boolean done = false;
@@ -184,11 +185,12 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
@Override
public boolean filterBitstream(Context context, Item myItem,
Bitstream myBitstream) throws Exception {
boolean filtered = false;
Bitstream myBitstream) throws Exception
{
boolean filtered = false;
// iterate through filter classes. A single format may be actioned
// by more than one filter
// iterate through filter classes. A single format may be actioned
// by more than one filter
for (FormatFilter filterClass : filterClasses) {
//List fmts = (List)filterFormats.get(filterClasses[i].getClass().getName());
String pluginName = null;
@@ -207,7 +209,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
//For other MediaFilters, map key is just:
// <class-name>
List<String> fmts = filterFormats.get(filterClass.getClass().getName() +
(pluginName != null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
(pluginName != null ? FILTER_PLUGIN_SEPARATOR + pluginName : ""));
if (fmts.contains(myBitstream.getFormat(context).getShortDescription())) {
try {
@@ -288,7 +290,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
} catch (Exception e) {
System.out.println("ERROR filtering, skipping bitstream #"
+ myBitstream.getID() + " " + e);
+ myBitstream.getID() + " " + e);
e.printStackTrace();
}
}
@@ -299,13 +301,15 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
@Override
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
throws Exception {
throws Exception
{
//do pre-processing of this bitstream, and if it fails, skip this bitstream!
if (!formatFilter.preProcessBitstream(context, item, source, isVerbose)) {
if(!formatFilter.preProcessBitstream(context, item, source, isVerbose))
{
return false;
}
boolean overWrite = isForce;
boolean overWrite = isForce;
// get bitstream filename, calculate destination filename
String newName = formatFilter.getFilteredName(source.getName());
@@ -316,13 +320,14 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
List<Bundle> bundles = itemService.getBundles(item, formatFilter.getBundleName());
// check if destination bitstream exists
if (bundles.size() > 0) {
if (bundles.size() > 0)
{
// only finds the last match (FIXME?)
for (Bundle bundle : bundles) {
List<Bitstream> bitstreams = bundle.getBitstreams();
for (Bitstream bitstream : bitstreams) {
if (bitstream.getName().trim().equals(newName.trim())) {
if (bitstream.getName().equals(newName)) {
targetBundle = bundle;
existingBitstream = bitstream;
}
@@ -331,18 +336,20 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
// if exists and overwrite = false, exit
if (!overWrite && (existingBitstream != null)) {
if (!isQuiet) {
if (!overWrite && (existingBitstream != null))
{
if (!isQuiet)
{
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
+ " (item: " + item.getHandle() + ") because '" + newName + "' already exists");
}
return false;
}
if (isVerbose) {
if(isVerbose) {
System.out.println("PROCESSING: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ")");
+ " (item: " + item.getHandle() + ")");
}
InputStream destStream;
@@ -352,7 +359,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
if (destStream == null) {
if (!isQuiet) {
System.out.println("SKIPPED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
+ " (item: " + item.getHandle() + ") because filtering was unsuccessful");
}
return false;
@@ -363,9 +370,12 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
// create new bundle if needed
if (bundles.size() < 1) {
if (bundles.size() < 1)
{
targetBundle = bundleService.create(context, item, formatFilter.getBundleName());
} else {
}
else
{
// take the first match
targetBundle = bundles.get(0);
}
@@ -375,12 +385,12 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// Now set the format and name of the bitstream
b.setName(context, newName);
b.setSource(context, "Written by FormatFilter " + formatFilter.getClass().getName() +
" on " + DCDate.getCurrent() + " (GMT).");
" on " + DCDate.getCurrent() + " (GMT).");
b.setDescription(context, formatFilter.getDescription());
// Find the proper format
BitstreamFormat bf = bitstreamFormatService.findByShortDescription(context,
formatFilter.getFormatString());
formatFilter.getFormatString());
bitstreamService.setFormat(context, b, bf);
bitstreamService.update(context, b);
@@ -389,7 +399,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
authorizeService.removeAllPolicies(context, b);
//- Determine if this is a public-derivative format
if (publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
if(publicFiltersClasses.contains(formatFilter.getClass().getSimpleName())) {
//- Set derivative bitstream to be publicly accessible
Group anonymous = groupService.findByName(context, Group.ANONYMOUS);
authorizeService.addPolicy(context, b, Constants.READ, anonymous);
@@ -400,13 +410,15 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
// fixme - set date?
// we are overwriting, so remove old bitstream
if (existingBitstream != null) {
if (existingBitstream != null)
{
bundleService.removeBitstream(context, targetBundle, existingBitstream);
}
if (!isQuiet) {
if (!isQuiet)
{
System.out.println("FILTERED: bitstream " + source.getID()
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
+ " (item: " + item.getHandle() + ") and created '" + newName + "'");
}
//do post-processing of the generated bitstream
@@ -416,18 +428,24 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
}
@Override
public Item getCurrentItem() {
public Item getCurrentItem()
{
return currentItem;
}
@Override
public boolean inSkipList(String identifier) {
if (skipList != null && skipList.contains(identifier)) {
if (!isQuiet) {
public boolean inSkipList(String identifier)
{
if(skipList!=null && skipList.contains(identifier))
{
if (!isQuiet)
{
System.out.println("SKIP-LIST: skipped bitstreams within identifier " + identifier);
}
return true;
} else {
}
else
{
return false;
}
}

View File

@@ -7,14 +7,18 @@
*/
package org.dspace.app.mediafilter;
import java.awt.image.BufferedImage;
import java.awt.image.*;
import java.io.InputStream;
import javax.imageio.ImageIO;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.dspace.content.Item;
import org.dspace.app.mediafilter.JPEGFilter;
/**
* Create JPEG thumbnails from PDF cover page using PDFBox.
* Based on JPEGFilter:
@@ -25,17 +29,21 @@ import org.dspace.content.Item;
* @author Ivan Masár helix84@centrum.sk
* @author Jason Sherman jsherman@usao.edu
*/
public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFormats {
public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFormats
{
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "THUMBNAIL";
}
@@ -43,7 +51,8 @@ public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFor
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "JPEG";
}
@@ -51,20 +60,23 @@ public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFor
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Generated Thumbnail";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
PDDocument doc = PDDocument.load(source);
PDFRenderer renderer = new PDFRenderer(doc);
BufferedImage buf = renderer.renderImage(0);
@@ -76,17 +88,20 @@ public class PDFBoxThumbnail extends MediaFilter implements SelfRegisterInputFor
}
@Override
public String[] getInputMIMETypes() {
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
@Override
public String[] getInputDescriptions() {
public String[] getInputDescriptions()
{
return null;
}
@Override
public String[] getInputExtensions() {
public String[] getInputExtensions()
{
// Temporarily disabled as JDK 1.6 only
// return ImageIO.getReaderFileSuffixes();
return null;

View File

@@ -28,20 +28,24 @@ import org.dspace.core.ConfigurationManager;
* instantiate filter - bitstream format doesn't exist
*
*/
public class PDFFilter extends MediaFilter {
public class PDFFilter extends MediaFilter
{
private static Logger log = Logger.getLogger(PDFFilter.class);
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
@@ -49,7 +53,8 @@ public class PDFFilter extends MediaFilter {
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
@@ -57,21 +62,25 @@ public class PDFFilter extends MediaFilter {
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
try {
throws Exception
{
try
{
boolean useTemporaryFile = ConfigurationManager.getBooleanProperty("pdffilter.largepdfs", false);
// get input stream from bitstream
@@ -83,43 +92,62 @@ public class PDFFilter extends MediaFilter {
File tempTextFile = null;
ByteArrayOutputStream byteStream = null;
if (useTemporaryFile) {
if (useTemporaryFile)
{
tempTextFile = File.createTempFile("dspacepdfextract" + source.hashCode(), ".txt");
tempTextFile.deleteOnExit();
writer = new OutputStreamWriter(new FileOutputStream(tempTextFile));
} else {
}
else
{
byteStream = new ByteArrayOutputStream();
writer = new OutputStreamWriter(byteStream);
}
try {
try
{
pdfDoc = PDDocument.load(source);
pts.writeText(pdfDoc, writer);
} finally {
try {
if (pdfDoc != null) {
}
finally
{
try
{
if (pdfDoc != null)
{
pdfDoc.close();
}
} catch (Exception e) {
log.error("Error closing PDF file: " + e.getMessage(), e);
}
catch(Exception e)
{
log.error("Error closing PDF file: " + e.getMessage(), e);
}
try {
try
{
writer.close();
} catch (Exception e) {
log.error("Error closing temporary extract file: " + e.getMessage(), e);
}
catch(Exception e)
{
log.error("Error closing temporary extract file: " + e.getMessage(), e);
}
}
if (useTemporaryFile) {
if (useTemporaryFile)
{
return new FileInputStream(tempTextFile);
} else {
}
else
{
byte[] bytes = byteStream.toByteArray();
return new ByteArrayInputStream(bytes);
}
} catch (OutOfMemoryError oome) {
}
catch (OutOfMemoryError oome)
{
log.error("Error parsing PDF document " + oome.getMessage(), oome);
if (!ConfigurationManager.getBooleanProperty("pdffilter.skiponmemoryexception", false)) {
if (!ConfigurationManager.getBooleanProperty("pdffilter.skiponmemoryexception", false))
{
throw oome;
}
}

View File

@@ -8,8 +8,8 @@
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
@@ -23,45 +23,55 @@ import org.slf4j.LoggerFactory;
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
extends MediaFilter {
extends MediaFilter
{
private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
@Override
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
@Override
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
@Override
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
String text;
try {
try
{
// get input stream from bitstream, pass to filter, get string back
POITextExtractor extractor = ExtractorFactory.createExtractor(source);
text = extractor.getText();
} catch (IOException | OpenXML4JException | XmlException e) {
}
catch (IOException | OpenXML4JException | XmlException e)
{
System.err.format("Invalid File Format: %s%n", e.getMessage());
LOG.error("Unable to parse the bitstream: ", e);
throw e;
}
// if verbose flag is set, print out extracted text to STDOUT
if (verbose) {
if (verbose)
{
System.out.println(text);
}

View File

@@ -10,41 +10,47 @@ package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import org.apache.log4j.Logger;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.apache.poi.hslf.extractor.PowerPointExtractor;
import org.apache.poi.POITextExtractor;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
/*
* TODO: Allow user to configure extraction of only text or only notes
*
*/
public class PowerPointFilter extends MediaFilter {
public class PowerPointFilter extends MediaFilter
{
private static Logger log = Logger.getLogger(PowerPointFilter.class);
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
/**
* @return String bitstream format
*
* TODO: Check that this is correct
* TODO: Check that this is correct
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
@@ -52,48 +58,59 @@ public class PowerPointFilter extends MediaFilter {
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
try {
try
{
String extractedText = null;
new ExtractorFactory();
POITextExtractor pptExtractor = ExtractorFactory
.createExtractor(source);
.createExtractor(source);
// PowerPoint XML files and legacy format PowerPoint files
// require different classes and APIs for text extraction
// If this is a PowerPoint XML file, extract accordingly
if (pptExtractor instanceof XSLFPowerPointExtractor) {
if (pptExtractor instanceof XSLFPowerPointExtractor)
{
// The true method arguments indicate that text from
// the slides and the notes is desired
extractedText = ((XSLFPowerPointExtractor) pptExtractor)
.getText(true, true);
} else if (pptExtractor instanceof PowerPointExtractor) { // Legacy PowerPoint files
.getText(true, true);
}
// Legacy PowerPoint files
else if (pptExtractor instanceof PowerPointExtractor)
{
extractedText = ((PowerPointExtractor) pptExtractor).getText()
+ " " + ((PowerPointExtractor) pptExtractor).getNotes();
+ " " + ((PowerPointExtractor) pptExtractor).getNotes();
}
if (extractedText != null) {
if (extractedText != null)
{
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
if (verbose)
{
System.out.println(extractedText);
}
@@ -103,7 +120,9 @@ public class PowerPointFilter extends MediaFilter {
return bais;
}
} catch (Exception e) {
}
catch (Exception e)
{
log.error("Error filtering bitstream: " + e.getMessage(), e);
throw e;
}

View File

@@ -11,7 +11,8 @@ package org.dspace.app.mediafilter;
* Interface to allow filters to register the input formats they handle
* (useful for exposing underlying capabilities of libraries used)
*/
public interface SelfRegisterInputFormats {
public interface SelfRegisterInputFormats
{
public String[] getInputMIMETypes();
public String[] getInputDescriptions();

View File

@@ -8,10 +8,11 @@
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.IOException;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
import org.textmining.extraction.TextExtractor;
import org.textmining.extraction.word.WordTextExtractorFactory;
@@ -22,20 +23,24 @@ import org.textmining.extraction.word.WordTextExtractorFactory;
* instantiate filter - bitstream format doesn't exist.
*
*/
public class WordFilter extends MediaFilter {
public class WordFilter extends MediaFilter
{
private static Logger log = Logger.getLogger(WordFilter.class);
@Override
public String getFilteredName(String oldFilename) {
public String getFilteredName(String oldFilename)
{
return oldFilename + ".txt";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
public String getBundleName()
{
return "TEXT";
}
@@ -43,7 +48,8 @@ public class WordFilter extends MediaFilter {
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
public String getFormatString()
{
return "Text";
}
@@ -51,30 +57,35 @@ public class WordFilter extends MediaFilter {
* @return String description
*/
@Override
public String getDescription() {
public String getDescription()
{
return "Extracted text";
}
/**
* @param currentItem item
* @param source source input stream
* @param verbose verbose mode
* @param source source input stream
* @param verbose verbose mode
*
* @return InputStream the resulting input stream
* @throws Exception if error
*/
@Override
public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
throws Exception {
throws Exception
{
// get input stream from bitstream
// pass to filter, get string back
try {
try
{
WordTextExtractorFactory factory = new WordTextExtractorFactory();
TextExtractor e = factory.textExtractor(source);
String extractedText = e.getText();
// if verbose flag is set, print out extracted text
// to STDOUT
if (verbose) {
if (verbose)
{
System.out.println(extractedText);
}
@@ -83,10 +94,12 @@ public class WordFilter extends MediaFilter {
ByteArrayInputStream bais = new ByteArrayInputStream(textBytes);
return bais; // will this work? or will the byte array be out of scope?
} catch (IOException ioe) {
}
catch (IOException ioe)
{
System.out.println("Invalid Word Format");
log.error("Error detected - Word File format not recognized: "
+ ioe.getMessage(), ioe);
+ ioe.getMessage(), ioe);
throw ioe;
}
}

View File

@@ -11,8 +11,7 @@ import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.services.factory.DSpaceServicesFactory;
/**
* Abstract factory to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to
* retrieve an implementation
* Abstract factory to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/
@@ -20,8 +19,7 @@ public abstract class MediaFilterServiceFactory {
public abstract MediaFilterService getMediaFilterService();
public static MediaFilterServiceFactory getInstance() {
return DSpaceServicesFactory.getInstance().getServiceManager()
.getServiceByName("mediaFilterServiceFactory", MediaFilterServiceFactory.class);
public static MediaFilterServiceFactory getInstance(){
return DSpaceServicesFactory.getInstance().getServiceManager().getServiceByName("mediaFilterServiceFactory", MediaFilterServiceFactory.class);
}
}

View File

@@ -11,8 +11,7 @@ import org.dspace.app.mediafilter.service.MediaFilterService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Factory implementation to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to
* retrieve an implementation
* Factory implementation to get services for the mediafilter package, use MediaFilterServiceFactory.getInstance() to retrieve an implementation
*
* @author kevinvandevelde at atmire.com
*/

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.app.mediafilter.service;
import java.util.List;
import java.util.Map;
import org.dspace.app.mediafilter.FormatFilter;
import org.dspace.content.Bitstream;
import org.dspace.content.Collection;
@@ -17,6 +14,9 @@ import org.dspace.content.Community;
import org.dspace.content.Item;
import org.dspace.core.Context;
import java.util.List;
import java.util.Map;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
@@ -36,10 +36,10 @@ public interface MediaFilterService {
public void applyFiltersAllItems(Context context) throws Exception;
public void applyFiltersCommunity(Context context, Community community)
throws Exception;
throws Exception;
public void applyFiltersCollection(Context context, Collection collection)
throws Exception;
throws Exception;
public void applyFiltersItem(Context c, Item item) throws Exception;
@@ -49,9 +49,9 @@ public interface MediaFilterService {
* filters if possible.
*
* @param context context
* @param myItem item
* @param myItem item
* @return true if any bitstreams processed,
* false if none
* false if none
* @throws Exception if error
*/
public boolean filterItem(Context context, Item myItem) throws Exception;
@@ -63,11 +63,11 @@ public interface MediaFilterService {
* instantiated. Exceptions from filtering will be logged to STDOUT and
* swallowed.
*
* @param c context
* @param myItem item
* @param c context
* @param myItem item
* @param myBitstream bitstream
* @return true if bitstream processed,
* false if no applicable filter or already processed
* false if no applicable filter or already processed
* @throws Exception if error
*/
public boolean filterBitstream(Context c, Item myItem, Bitstream myBitstream) throws Exception;
@@ -79,16 +79,20 @@ public interface MediaFilterService {
* already been filtered, and if not or if overWrite is set, invokes the
* filter.
*
* @param context context
* @param item item containing bitstream to process
* @param source source bitstream to process
* @param formatFilter FormatFilter to perform filtering
* @return true if new rendition is created, false if rendition already
* exists and overWrite is not set
* @param context
* context
* @param item
* item containing bitstream to process
* @param source
* source bitstream to process
* @param formatFilter
* FormatFilter to perform filtering
* @throws Exception if error occurs
* @return true if new rendition is created, false if rendition already
* exists and overWrite is not set
*/
public boolean processBitstream(Context context, Item item, Bitstream source, FormatFilter formatFilter)
throws Exception;
throws Exception;
/**
* Return the item that is currently being processed/filtered
@@ -105,9 +109,11 @@ public interface MediaFilterService {
/**
* Check whether or not to skip processing the given identifier.
*
* @param identifier identifier (handle) of a community, collection or item
* @param identifier
* identifier (handle) of a community, collection or item
*
* @return true if this community, collection or item should be skipped
* during processing. Otherwise, return false.
* during processing. Otherwise, return false.
*/
public boolean inSkipList(String identifier);

View File

@@ -7,19 +7,7 @@
*/
package org.dspace.app.packager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
@@ -36,6 +24,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
import java.io.*;
import java.sql.SQLException;
import java.util.List;
/**
* Command-line interface to the Packager plugin.
* <p>
@@ -51,8 +43,7 @@ import org.dspace.workflow.WorkflowException;
* (Add the -h option to get the command to show its own help)
*
* <pre>
* 1. To submit a SIP (submissions tend to create a *new* object, with a new handle. If you want to restore an
* object, see -r option below)
* 1. To submit a SIP (submissions tend to create a *new* object, with a new handle. If you want to restore an object, see -r option below)
* dspace packager
* -e {ePerson}
* -t {PackagerType}
@@ -72,8 +63,7 @@ import org.dspace.workflow.WorkflowException;
*
* 2. To restore an AIP (similar to submit mode, but attempts to restore with the handles/parents specified in AIP):
* dspace packager
* -r --- restores a object from a package info, including the specified handle (will throw an error if
* handle is already in use)
* -r --- restores a object from a package info, including the specified handle (will throw an error if handle is already in use)
* -e {ePerson}
* -t {PackagerType}
* [-o {name}={value} [ -o {name}={value} ..]]
@@ -83,19 +73,14 @@ import org.dspace.workflow.WorkflowException;
* Use with -r to only restore objects which do not already exist. By default, -r will throw an error
* and rollback all changes when an object is found that already exists.
* [-f] --- Force a restore (even if object already exists).
* Use with -r to replace an existing object with one from a package (essentially a delete and
* restore).
* By default, -r will throw an error and rollback all changes when an object is found that already
* exists.
* [-i {identifier-handle-of-object}] -- Optional when -f is specified. When replacing an object, you can
* specify the
* Use with -r to replace an existing object with one from a package (essentially a delete and restore).
* By default, -r will throw an error and rollback all changes when an object is found that already exists.
* [-i {identifier-handle-of-object}] -- Optional when -f is specified. When replacing an object, you can specify the
* object to replace if it cannot be easily determined from the package itself.
* {package-filename}
*
* Restoring is very similar to submitting, except that you are recreating pre-existing objects. So, in a restore,
* the object(s) are
* being recreated based on the details in the AIP. This means that the object is recreated with the same handle
* and same parent/children
* Restoring is very similar to submitting, except that you are recreating pre-existing objects. So, in a restore, the object(s) are
* being recreated based on the details in the AIP. This means that the object is recreated with the same handle and same parent/children
* objects. Not all {PackagerTypes} may support a "restore".
*
* 3. To write out a DIP:
@@ -120,60 +105,49 @@ import org.dspace.workflow.WorkflowException;
* @author Tim Donohue
* @version $Revision$
*/
public class Packager {
public class Packager
{
/* Various private global settings/options */
protected String packageType = null;
protected boolean submit = true;
protected boolean userInteractionEnabled = true;
// die from illegal command line
protected static void usageError(String msg) {
protected static void usageError(String msg)
{
System.out.println(msg);
System.out.println(" (run with -h flag for details)");
System.exit(1);
}
public static void main(String[] argv) throws Exception {
public static void main(String[] argv) throws Exception
{
Options options = new Options();
options.addOption("p", "parent", true,
"Handle(s) of parent Community or Collection into which to ingest object (repeatable)");
"Handle(s) of parent Community or Collection into which to ingest object (repeatable)");
options.addOption("e", "eperson", true,
"email address of eperson doing importing");
"email address of eperson doing importing");
options
.addOption(
"w",
"install",
false,
"disable workflow; install immediately without going through collection's workflow");
options.addOption("r", "restore", false,
"ingest in \"restore\" mode. Restores a missing object based on the contents in a package.");
options.addOption("k", "keep-existing", false,
"if an object is found to already exist during a restore (-r), then keep the existing " +
"object and continue processing. Can only be used with '-r'. This avoids " +
"object-exists errors which are thrown by -r by default.");
options.addOption("f", "force-replace", false,
"if an object is found to already exist during a restore (-r), then remove it and replace " +
"it with the contents of the package. Can only be used with '-r'. This REPLACES the " +
"object(s) in the repository with the contents from the package(s).");
.addOption(
"w",
"install",
false,
"disable workflow; install immediately without going through collection's workflow");
options.addOption("r", "restore", false, "ingest in \"restore\" mode. Restores a missing object based on the contents in a package.");
options.addOption("k", "keep-existing", false, "if an object is found to already exist during a restore (-r), then keep the existing object and continue processing. Can only be used with '-r'. This avoids object-exists errors which are thrown by -r by default.");
options.addOption("f", "force-replace", false, "if an object is found to already exist during a restore (-r), then remove it and replace it with the contents of the package. Can only be used with '-r'. This REPLACES the object(s) in the repository with the contents from the package(s).");
options.addOption("t", "type", true, "package type or MIMEtype");
options
.addOption("o", "option", true,
"Packager option to pass to plugin, \"name=value\" (repeatable)");
.addOption("o", "option", true,
"Packager option to pass to plugin, \"name=value\" (repeatable)");
options.addOption("d", "disseminate", false,
"Disseminate package (output); default is to submit.");
"Disseminate package (output); default is to submit.");
options.addOption("s", "submit", false,
"Submission package (Input); this is the default. ");
"Submission package (Input); this is the default. ");
options.addOption("i", "identifier", true, "Handle of object to disseminate.");
options.addOption("a", "all", false,
"also recursively ingest/disseminate any child packages, e.g. all Items within a Collection" +
" (not all packagers may support this option!)");
options.addOption("h", "help", false,
"help (you may also specify '-h -t [type]' for additional help with a specific type of " +
"packager)");
options.addOption("u", "no-user-interaction", false,
"Skips over all user interaction (i.e. [y/n] question prompts) within this script. This " +
"flag can be used if you want to save (pipe) a report of all changes to a file, and " +
"therefore need to bypass all user interaction.");
options.addOption("a", "all", false, "also recursively ingest/disseminate any child packages, e.g. all Items within a Collection (not all packagers may support this option!)");
options.addOption("h", "help", false, "help (you may also specify '-h -t [type]' for additional help with a specific type of packager)");
options.addOption("u", "no-user-interaction", false, "Skips over all user interaction (i.e. [y/n] question prompts) within this script. This flag can be used if you want to save (pipe) a report of all changes to a file, and therefore need to bypass all user interaction.");
CommandLineParser parser = new PosixParser();
CommandLine line = parser.parse(options, argv);
@@ -188,13 +162,15 @@ public class Packager {
//initialize a new packager -- we'll add all our current params as settings
Packager myPackager = new Packager();
if (line.hasOption('h')) {
if (line.hasOption('h'))
{
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Packager [options] package-file|-\n",
options);
options);
//If user specified a type, also print out the SIP and DIP options
// that are specific to that type of packager
if (line.hasOption('t')) {
if (line.hasOption('t'))
{
System.out.println("\n--------------------------------------------------------------");
System.out.println("Additional options for the " + line.getOptionValue('t') + " packager:");
System.out.println("--------------------------------------------------------------");
@@ -203,36 +179,44 @@ public class Packager {
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, line.getOptionValue('t'));
if (sip != null) {
if (sip != null)
{
System.out.println("\n\n" + line.getOptionValue('t') + " Submission (SIP) plugin options:\n");
System.out.println(sip.getParameterHelp());
} else {
}
else
{
System.out.println("\nNo valid Submission plugin found for " + line.getOptionValue('t') + " type.");
}
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, line.getOptionValue('t'));
if (dip != null) {
if (dip != null)
{
System.out.println("\n\n" + line.getOptionValue('t') + " Dissemination (DIP) plugin options:\n");
System.out.println(dip.getParameterHelp());
} else {
System.out
.println("\nNo valid Dissemination plugin found for " + line.getOptionValue('t') + " type.");
}
else
{
System.out.println("\nNo valid Dissemination plugin found for " + line.getOptionValue('t') + " type.");
}
} else {
//otherwise, display list of valid packager types
}
else //otherwise, display list of valid packager types
{
System.out.println("\nAvailable Submission Package (SIP) types:");
String pn[] = pluginService
.getAllPluginNames(PackageIngester.class);
for (int i = 0; i < pn.length; ++i) {
.getAllPluginNames(PackageIngester.class);
for (int i = 0; i < pn.length; ++i)
{
System.out.println(" " + pn[i]);
}
System.out
.println("\nAvailable Dissemination Package (DIP) types:");
.println("\nAvailable Dissemination Package (DIP) types:");
pn = pluginService.getAllPluginNames(PackageDisseminator.class);
for (int i = 0; i < pn.length; ++i) {
for (int i = 0; i < pn.length; ++i)
{
System.out.println(" " + pn[i]);
}
}
@@ -240,66 +224,85 @@ public class Packager {
}
//look for flag to disable all user interaction
if (line.hasOption('u')) {
if(line.hasOption('u'))
{
myPackager.userInteractionEnabled = false;
}
if (line.hasOption('w')) {
if (line.hasOption('w'))
{
pkgParams.setWorkflowEnabled(false);
}
if (line.hasOption('r')) {
if (line.hasOption('r'))
{
pkgParams.setRestoreModeEnabled(true);
}
//keep-existing is only valid in restoreMode (-r) -- otherwise ignore -k option.
if (line.hasOption('k') && pkgParams.restoreModeEnabled()) {
if (line.hasOption('k') && pkgParams.restoreModeEnabled())
{
pkgParams.setKeepExistingModeEnabled(true);
}
//force-replace is only valid in restoreMode (-r) -- otherwise ignore -f option.
if (line.hasOption('f') && pkgParams.restoreModeEnabled()) {
if (line.hasOption('f') && pkgParams.restoreModeEnabled())
{
pkgParams.setReplaceModeEnabled(true);
}
if (line.hasOption('e')) {
if (line.hasOption('e'))
{
eperson = line.getOptionValue('e');
}
if (line.hasOption('p')) {
if (line.hasOption('p'))
{
parents = line.getOptionValues('p');
}
if (line.hasOption('t')) {
if (line.hasOption('t'))
{
myPackager.packageType = line.getOptionValue('t');
}
if (line.hasOption('i')) {
if (line.hasOption('i'))
{
identifier = line.getOptionValue('i');
}
if (line.hasOption('a')) {
//enable 'recursiveMode' param to packager implementations, in case it helps with packaging or ingestion
// process
if (line.hasOption('a'))
{
//enable 'recursiveMode' param to packager implementations, in case it helps with packaging or ingestion process
pkgParams.setRecursiveModeEnabled(true);
}
String files[] = line.getArgs();
if (files.length > 0) {
if (files.length > 0)
{
sourceFile = files[0];
}
if (line.hasOption('d')) {
if (line.hasOption('d'))
{
myPackager.submit = false;
}
if (line.hasOption('o')) {
if (line.hasOption('o'))
{
String popt[] = line.getOptionValues('o');
for (int i = 0; i < popt.length; ++i) {
for (int i = 0; i < popt.length; ++i)
{
String pair[] = popt[i].split("\\=", 2);
if (pair.length == 2) {
if (pair.length == 2)
{
pkgParams.addProperty(pair[0].trim(), pair[1].trim());
} else if (pair.length == 1) {
}
else if (pair.length == 1)
{
pkgParams.addProperty(pair[0].trim(), "");
} else {
}
else
{
System.err
.println("Warning: Illegal package option format: \""
+ popt[i] + "\"");
.println("Warning: Illegal package option format: \""
+ popt[i] + "\"");
}
}
}
// Sanity checks on arg list: required args
// REQUIRED: sourceFile, ePerson (-e), packageType (-t)
if (sourceFile == null || eperson == null || myPackager.packageType == null) {
if (sourceFile == null || eperson == null || myPackager.packageType == null)
{
System.err.println("Error - missing a REQUIRED argument or option.\n");
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("PackageManager [options] package-file|-\n", options);
@@ -310,60 +313,68 @@ public class Packager {
Context context = new Context();
EPerson myEPerson = null;
myEPerson = EPersonServiceFactory.getInstance().getEPersonService().findByEmail(context, eperson);
if (myEPerson == null) {
if (myEPerson == null)
{
usageError("Error, eperson cannot be found: " + eperson);
}
context.setCurrentUser(myEPerson);
//If we are in REPLACE mode
if (pkgParams.replaceModeEnabled()) {
if(pkgParams.replaceModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null) {
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
{
usageError("Error, Unknown package type: " + myPackager.packageType);
}
DSpaceObject objToReplace = null;
//if a specific identifier was specified, make sure it is valid
if (identifier != null && identifier.length() > 0) {
objToReplace = HandleServiceFactory.getInstance().getHandleService()
.resolveToObject(context, identifier);
if (objToReplace == null) {
if(identifier!=null && identifier.length()>0)
{
objToReplace = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (objToReplace == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
+ "Cannot resolve handle \"" + identifier + "\"");
+ "Cannot resolve handle \"" + identifier + "\"");
}
}
String choiceString = null;
if (myPackager.userInteractionEnabled) {
if(myPackager.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.println("\n\nWARNING -- You are running the packager in REPLACE mode.");
System.out.println(
"\nREPLACE mode may be potentially dangerous as it will automatically remove and replace contents" +
" within DSpace.");
System.out.println(
"We highly recommend backing up all your DSpace contents (files & database) before continuing.");
System.out.println("\nREPLACE mode may be potentially dangerous as it will automatically remove and replace contents within DSpace.");
System.out.println("We highly recommend backing up all your DSpace contents (files & database) before continuing.");
System.out.print("\nWould you like to continue? [y/n]: ");
choiceString = input.readLine();
} else {
}
else
{
//user interaction disabled -- default answer to 'yes', otherwise script won't continue
choiceString = "y";
}
if (choiceString.equalsIgnoreCase("y")) {
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("Beginning replacement process...");
try {
try
{
//replace the object from the source file
myPackager.replace(context, sip, pkgParams, sourceFile, objToReplace);
//commit all changes & exit successfully
context.complete();
System.exit(0);
} catch (Exception e) {
}
catch (Exception e)
{
// abort all operations
e.printStackTrace();
context.abort();
@@ -372,67 +383,78 @@ public class Packager {
}
}
} else if (myPackager.submit || pkgParams.restoreModeEnabled()) {
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
}
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
else if (myPackager.submit || pkgParams.restoreModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null) {
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
{
usageError("Error, Unknown package type: " + myPackager.packageType);
}
// validate each parent arg (if any)
DSpaceObject parentObjs[] = null;
if (parents != null) {
if(parents!=null)
{
System.out.println("Destination parents:");
parentObjs = new DSpaceObject[parents.length];
for (int i = 0; i < parents.length; i++) {
for (int i = 0; i < parents.length; i++)
{
// sanity check: did handle resolve?
parentObjs[i] = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context,
parents[i]);
if (parentObjs[i] == null) {
parents[i]);
if (parentObjs[i] == null)
{
throw new IllegalArgumentException(
"Bad parent list -- "
+ "Cannot resolve parent handle \""
+ parents[i] + "\"");
"Bad parent list -- "
+ "Cannot resolve parent handle \""
+ parents[i] + "\"");
}
System.out.println((i == 0 ? "Owner: " : "Parent: ")
+ parentObjs[i].getHandle());
+ parentObjs[i].getHandle());
}
}
try {
try
{
//ingest the object from the source file
myPackager.ingest(context, sip, pkgParams, sourceFile, parentObjs);
//commit all changes & exit successfully
context.complete();
System.exit(0);
} catch (Exception e) {
}
catch (Exception e)
{
// abort all operations
e.printStackTrace();
context.abort();
System.out.println(e);
System.exit(1);
}
} else {
// else, if DISSEMINATE mode
}// else, if DISSEMINATE mode
else
{
context.setMode(Context.Mode.READ_ONLY);
//retrieve specified package disseminator
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, myPackager.packageType);
if (dip == null) {
if (dip == null)
{
usageError("Error, Unknown package type: " + myPackager.packageType);
}
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService()
.resolveToObject(context, identifier);
if (dso == null) {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, identifier);
if (dso == null)
{
throw new IllegalArgumentException("Bad identifier/handle -- "
+ "Cannot resolve handle \"" + identifier + "\"");
+ "Cannot resolve handle \"" + identifier + "\"");
}
//disseminate the requested object
@@ -448,26 +470,26 @@ public class Packager {
* <p>
* Please note that replace (-r -f) mode calls the replace() method instead.
*
* @param context DSpace Context
* @param sip PackageIngester which will actually ingest the package
* @param pkgParams Parameters to pass to individual packager instances
* @param context DSpace Context
* @param sip PackageIngester which will actually ingest the package
* @param pkgParams Parameters to pass to individual packager instances
* @param sourceFile location of the source package to ingest
* @param parentObjs Parent DSpace object(s) to attach new object to
* @throws IOException if IO error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws FileNotFoundException if file doesn't exist
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
*/
protected void ingest(Context context, PackageIngester sip, PackageParameters pkgParams, String sourceFile,
DSpaceObject parentObjs[])
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException,
PackageException {
protected void ingest(Context context, PackageIngester sip, PackageParameters pkgParams, String sourceFile, DSpaceObject parentObjs[])
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException, PackageException
{
// make sure we have an input file
File pkgFile = new File(sourceFile);
if (!pkgFile.exists()) {
if(!pkgFile.exists())
{
System.out.println("\nERROR: Package located at " + sourceFile + " does not exist!");
System.exit(1);
}
@@ -476,92 +498,108 @@ public class Packager {
//find first parent (if specified) -- this will be the "owner" of the object
DSpaceObject parent = null;
if (parentObjs != null && parentObjs.length > 0) {
if(parentObjs!=null && parentObjs.length>0)
{
parent = parentObjs[0];
}
//NOTE: at this point, Parent may be null -- in which case it is up to the PackageIngester
// to either determine the Parent (from package contents) or throw an error.
try {
try
{
//If we are doing a recursive ingest, call ingestAll()
if (pkgParams.recursiveModeEnabled()) {
if(pkgParams.recursiveModeEnabled())
{
System.out.println("\nAlso ingesting all referenced packages (recursive mode)..");
System.out.println(
"This may take a while, please check your logs for ongoing status while we process each package.");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
//ingest first package & recursively ingest anything else that package references (child packages, etc)
List<String> hdlResults = sip.ingestAll(context, parent, pkgFile, pkgParams, null);
if (hdlResults != null) {
if (hdlResults != null)
{
//Report total objects created
System.out.println("\nCREATED a total of " + hdlResults.size() + " DSpace Objects.");
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled) {
if (this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were created? [y/n]: ");
choiceString = input.readLine();
} else {
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y")) {
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for (String result : hdlResults) {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService()
.resolveToObject(context, result);
for (String result : hdlResults)
{
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if (dso != null) {
if(dso!=null)
{
if (pkgParams.restoreModeEnabled()) {
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso
.getID() + " ] ");
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
} else {
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso
.getID() + " ] ");
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
}
}
}
} else {
}
else
{
//otherwise, just one package to ingest
try {
try
{
DSpaceObject dso = sip.ingest(context, parent, pkgFile, pkgParams, null);
if (dso != null) {
if (pkgParams.restoreModeEnabled()) {
if (dso != null)
{
if (pkgParams.restoreModeEnabled())
{
System.out.println("RESTORED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
} else {
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
else
{
System.out.println("CREATED new DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
" [ hdl=" + dso.getHandle() + ", dbID=" + dso.getID() + " ] ");
}
}
} catch (IllegalStateException ie) {
}
catch (IllegalStateException ie)
{
// NOTE: if we encounter an IllegalStateException, this means the
// handle is already in use and this object already exists.
//if we are skipping over (i.e. keeping) existing objects
if (pkgParams.keepExistingModeEnabled()) {
System.out.println(
"\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this " +
"handle.");
} else {
// Pass this exception on -- which essentially causes a full rollback of all changes (this
// is the default)
if (pkgParams.keepExistingModeEnabled())
{
System.out.println("\nSKIPPED processing package '" + pkgFile + "', as an Object already exists with this handle.");
}
else // Pass this exception on -- which essentially causes a full rollback of all changes (this is the default)
{
throw ie;
}
}
}
} catch (WorkflowException e) {
}
catch (WorkflowException e)
{
throw new PackageException(e);
}
}
@@ -571,116 +609,128 @@ public class Packager {
* Disseminate one or more DSpace objects into package(s) based on the
* options passed to the 'packager' script
*
* @param context DSpace context
* @param dip PackageDisseminator which will actually create the package
* @param dso DSpace Object to disseminate as a package
* @param pkgParams Parameters to pass to individual packager instances
* @param context DSpace context
* @param dip PackageDisseminator which will actually create the package
* @param dso DSpace Object to disseminate as a package
* @param pkgParams Parameters to pass to individual packager instances
* @param outputFile File where final package should be saved
* @throws IOException if IO error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws FileNotFoundException if file doesn't exist
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
*/
protected void disseminate(Context context, PackageDisseminator dip,
DSpaceObject dso, PackageParameters pkgParams,
String outputFile)
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException,
PackageException {
DSpaceObject dso, PackageParameters pkgParams,
String outputFile)
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException, PackageException
{
// initialize output file
File pkgFile = new File(outputFile);
System.out.println("\nDisseminating DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] to " + outputFile);
" [ hdl=" + dso.getHandle() + " ] to " + outputFile);
//If we are doing a recursive dissemination of this object & all its child objects, call disseminateAll()
if (pkgParams.recursiveModeEnabled()) {
if(pkgParams.recursiveModeEnabled())
{
System.out.println("\nAlso disseminating all child objects (recursive mode)..");
System.out.println(
"This may take a while, please check your logs for ongoing status while we process each package.");
System.out.println("This may take a while, please check your logs for ongoing status while we process each package.");
//disseminate initial object & recursively disseminate all child objects as well
List<File> fileResults = dip.disseminateAll(context, dso, pkgParams, pkgFile);
if (fileResults != null) {
if(fileResults!=null)
{
//Report total files created
System.out.println("\nCREATED a total of " + fileResults.size() + " dissemination package files.");
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled) {
if(this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all files that were created? [y/n]: ");
choiceString = input.readLine();
} else {
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y")) {
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for (File result : fileResults) {
for(File result : fileResults)
{
System.out.println("CREATED package file: " + result.getCanonicalPath());
}
}
}
} else {
}
else
{
//otherwise, just disseminate a single object to a single package file
dip.disseminate(context, dso, pkgParams, pkgFile);
if (pkgFile != null && pkgFile.exists()) {
if(pkgFile!=null && pkgFile.exists())
{
System.out.println("\nCREATED package file: " + pkgFile.getCanonicalPath());
}
}
}
/**
* Replace an one or more existing DSpace objects with the contents of
* specified package(s) based on the options passed to the 'packager' script.
* This method is only called for full replaces ('-r -f' options specified)
*
* @param context DSpace Context
* @param sip PackageIngester which will actually replace the object with the package
* @param pkgParams Parameters to pass to individual packager instances
* @param sourceFile location of the source package to ingest as the replacement
* @param context DSpace Context
* @param sip PackageIngester which will actually replace the object with the package
* @param pkgParams Parameters to pass to individual packager instances
* @param sourceFile location of the source package to ingest as the replacement
* @param objToReplace DSpace object to replace (may be null if it will be specified in the package itself)
* @throws IOException if IO error
* @throws SQLException if database error
* @throws IOException if IO error
* @throws SQLException if database error
* @throws FileNotFoundException if file doesn't exist
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
* @throws AuthorizeException if authorization error
* @throws CrosswalkException if crosswalk error
* @throws PackageException if packaging error
*/
protected void replace(Context context, PackageIngester sip, PackageParameters pkgParams, String sourceFile,
DSpaceObject objToReplace)
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException,
PackageException {
protected void replace(Context context, PackageIngester sip, PackageParameters pkgParams, String sourceFile, DSpaceObject objToReplace)
throws IOException, SQLException, FileNotFoundException, AuthorizeException, CrosswalkException, PackageException
{
// make sure we have an input file
File pkgFile = new File(sourceFile);
if (!pkgFile.exists()) {
if(!pkgFile.exists())
{
System.out.println("\nPackage located at " + sourceFile + " does not exist!");
System.exit(1);
}
System.out.println("\nReplacing DSpace object(s) with package located at " + sourceFile);
if (objToReplace != null) {
if(objToReplace!=null)
{
System.out.println("Will replace existing DSpace " + Constants.typeText[objToReplace.getType()] +
" [ hdl=" + objToReplace.getHandle() + " ]");
" [ hdl=" + objToReplace.getHandle() + " ]");
}
// NOTE: At this point, objToReplace may be null. If it is null, it is up to the PackageIngester
// to determine which Object needs to be replaced (based on the handle specified in the pkg, etc.)
try {
try
{
//If we are doing a recursive replace, call replaceAll()
if (pkgParams.recursiveModeEnabled()) {
//ingest first object using package & recursively replace anything else that package references
// (child objects, etc)
if (pkgParams.recursiveModeEnabled())
{
//ingest first object using package & recursively replace anything else that package references (child objects, etc)
List<String> hdlResults = sip.replaceAll(context, objToReplace, pkgFile, pkgParams);
if (hdlResults != null) {
@@ -689,42 +739,52 @@ public class Packager {
String choiceString = null;
//Ask if user wants full list printed to command line, as this may be rather long.
if (this.userInteractionEnabled) {
if (this.userInteractionEnabled)
{
BufferedReader input = new BufferedReader(new InputStreamReader(System.in));
System.out.print("\nWould you like to view a list of all objects that were replaced? [y/n]: ");
choiceString = input.readLine();
} else {
}
else
{
// user interaction disabled -- default answer to 'yes', as
// we want to provide user with as detailed a report as possible.
choiceString = "y";
}
// Provide detailed report if user answered 'yes'
if (choiceString.equalsIgnoreCase("y")) {
if (choiceString.equalsIgnoreCase("y"))
{
System.out.println("\n\n");
for (String result : hdlResults) {
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService()
.resolveToObject(context, result);
for (String result : hdlResults)
{
DSpaceObject dso = HandleServiceFactory.getInstance().getHandleService().resolveToObject(context, result);
if (dso != null) {
if (dso != null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
" [ hdl=" + dso.getHandle() + " ] ");
}
}
}
}
} else {
}
else
{
//otherwise, just one object to replace
DSpaceObject dso = sip.replace(context, objToReplace, pkgFile, pkgParams);
if (dso != null) {
if (dso != null)
{
System.out.println("REPLACED DSpace " + Constants.typeText[dso.getType()] +
" [ hdl=" + dso.getHandle() + " ] ");
" [ hdl=" + dso.getHandle() + " ] ");
}
}
} catch (WorkflowException e) {
}
catch (WorkflowException e)
{
throw new PackageException(e);
}
}

View File

@@ -7,37 +7,26 @@
*/
package org.dspace.app.requestitem;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import org.dspace.content.Bitstream;
import org.dspace.content.Item;
import org.dspace.core.Context;
import org.dspace.core.ReloadableEntity;
import javax.persistence.*;
import java.util.Date;
/**
* Object representing an Item Request
*/
@Entity
@Table(name = "requestitem")
@Table(name="requestitem")
public class RequestItem implements ReloadableEntity<Integer> {
@Id
@Column(name = "requestitem_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE, generator = "requestitem_seq")
@SequenceGenerator(name = "requestitem_seq", sequenceName = "requestitem_seq", allocationSize = 1)
@Column(name="requestitem_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE ,generator="requestitem_seq")
@SequenceGenerator(name="requestitem_seq", sequenceName="requestitem_seq", allocationSize = 1)
private int requestitem_id;
@ManyToOne(fetch = FetchType.LAZY)
@@ -54,9 +43,9 @@ public class RequestItem implements ReloadableEntity<Integer> {
@Column(name = "request_name", length = 64)
private String reqName;
// @Column(name = "request_message")
// @Column(name = "request_message")
// @Lob
@Column(name = "request_message", columnDefinition = "text")
@Column(name="request_message", columnDefinition = "text")
private String reqMessage;
@Column(name = "token", unique = true, length = 48)
@@ -82,10 +71,10 @@ public class RequestItem implements ReloadableEntity<Integer> {
/**
* Protected constructor, create object using:
* {@link org.dspace.app.requestitem.service.RequestItemService#createRequest(Context, Bitstream, Item,
* boolean, String, String, String)}
* {@link org.dspace.app.requestitem.service.RequestItemService#createRequest(Context, Bitstream, Item, boolean, String, String, String)}
*/
protected RequestItem() {
protected RequestItem()
{
}
public Integer getID() {

View File

@@ -14,16 +14,17 @@ import org.dspace.eperson.EPerson;
* Copy feature
*
* @author Andrea Bollini
*
*/
public class RequestItemAuthor {
private String fullName;
private String email;
private String fullName;
private String email;
public RequestItemAuthor(String fullName, String email) {
super();
this.fullName = fullName;
this.email = email;
}
public RequestItemAuthor(String fullName, String email) {
super();
this.fullName = fullName;
this.email = email;
}
public RequestItemAuthor(EPerson ePerson) {
super();
@@ -31,11 +32,11 @@ public class RequestItemAuthor {
this.email = ePerson.getEmail();
}
public String getEmail() {
return email;
}
public String getEmail() {
return email;
}
public String getFullName() {
return fullName;
}
public String getFullName() {
return fullName;
}
}

View File

@@ -17,8 +17,9 @@ import org.dspace.core.Context;
* request copy
*
* @author Andrea Bollini
*
*/
public interface RequestItemAuthorExtractor {
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException;
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException;
}

View File

@@ -7,8 +7,6 @@
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Item;
@@ -19,15 +17,16 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
/**
* RequestItem strategy to allow DSpace support team's helpdesk to receive requestItem request
* With this enabled, then the Item author/submitter doesn't receive the request, but the helpdesk instead does.
*
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no
* specified helpdesk email.
* Failover to the RequestItemSubmitterStrategy, which means the submitter would get the request if there is no specified helpdesk email.
*
* @author Sam Ottenhoff
* @author Peter Dietz
* @author Sam Ottenhoff
* @author Peter Dietz
*/
public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@@ -36,13 +35,11 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
@Autowired(required = true)
protected EPersonService ePersonService;
public RequestItemHelpdeskStrategy() {
}
public RequestItemHelpdeskStrategy() {}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item) throws SQLException {
boolean helpdeskOverridesSubmitter = ConfigurationManager
.getBooleanProperty("request.item.helpdesk.override", false);
boolean helpdeskOverridesSubmitter = ConfigurationManager.getBooleanProperty("request.item.helpdesk.override", false);
String helpDeskEmail = ConfigurationManager.getProperty("mail.helpdesk");
if (helpdeskOverridesSubmitter && StringUtils.isNotBlank(helpDeskEmail)) {
@@ -57,20 +54,19 @@ public class RequestItemHelpdeskStrategy extends RequestItemSubmitterStrategy {
* Return a RequestItemAuthor object for the specified helpdesk email address.
* It makes an attempt to find if there is a matching eperson for the helpdesk address, to use the name,
* Otherwise it falls back to a helpdeskname key in the Messages.props.
*
* @param context context
* @param context context
* @param helpDeskEmail email
* @return RequestItemAuthor
* @throws SQLException if database error
*/
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException {
public RequestItemAuthor getHelpDeskPerson(Context context, String helpDeskEmail) throws SQLException{
EPerson helpdeskEPerson = null;
context.turnOffAuthorisationSystem();
helpdeskEPerson = ePersonService.findByEmail(context, helpDeskEmail);
context.restoreAuthSystemState();
if (helpdeskEPerson != null) {
if(helpdeskEPerson != null) {
return new RequestItemAuthor(helpdeskEPerson);
} else {
String helpdeskName = I18nUtil.getMessage(

View File

@@ -11,8 +11,8 @@ import java.sql.SQLException;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.Item;
import org.dspace.content.MetadataValue;
import org.dspace.content.Item;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
@@ -23,6 +23,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* Failover to the RequestItemSubmitterStrategy
*
* @author Andrea Bollini
*
*/
public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
@@ -32,44 +33,49 @@ public class RequestItemMetadataStrategy extends RequestItemSubmitterStrategy {
@Autowired(required = true)
protected ItemService itemService;
public RequestItemMetadataStrategy() {
}
public RequestItemMetadataStrategy() {
}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
if (emailMetadata != null) {
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0) {
String email = vals.iterator().next().getValue();
String fullname = null;
if (fullNameMetadata != null) {
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
if (emailMetadata != null)
{
List<MetadataValue> vals = itemService.getMetadataByMetadataString(item, emailMetadata);
if (vals.size() > 0)
{
String email = vals.iterator().next().getValue();
String fullname = null;
if (fullNameMetadata != null)
{
List<MetadataValue> nameVals = itemService.getMetadataByMetadataString(item, fullNameMetadata);
if (nameVals.size() > 0) {
fullname = nameVals.iterator().next().getValue();
}
}
if (nameVals.size() > 0)
{
fullname = nameVals.iterator().next().getValue();
}
}
if (StringUtils.isBlank(fullname)) {
fullname = I18nUtil
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
RequestItemAuthor author = new RequestItemAuthor(
fullname, email);
return author;
}
}
return super.getRequestItemAuthor(context, item);
}
if (StringUtils.isBlank(fullname))
{
fullname = I18nUtil
.getMessage(
"org.dspace.app.requestitem.RequestItemMetadataStrategy.unnamed",
context);
}
RequestItemAuthor author = new RequestItemAuthor(
fullname, email);
return author;
}
}
return super.getRequestItemAuthor(context, item);
}
public void setEmailMetadata(String emailMetadata) {
this.emailMetadata = emailMetadata;
}
public void setEmailMetadata(String emailMetadata) {
this.emailMetadata = emailMetadata;
}
public void setFullNameMetadata(String fullNameMetadata) {
this.fullNameMetadata = fullNameMetadata;
}
public void setFullNameMetadata(String fullNameMetadata) {
this.fullNameMetadata = fullNameMetadata;
}
}

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.app.requestitem;
import java.sql.SQLException;
import java.util.Date;
import org.apache.log4j.Logger;
import org.dspace.app.requestitem.dao.RequestItemDAO;
import org.dspace.app.requestitem.service.RequestItemService;
@@ -19,6 +16,9 @@ import org.dspace.core.Context;
import org.dspace.core.Utils;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.Date;
/**
* Service implementation for the RequestItem object.
* This class is responsible for all business logic calls for the RequestItem object and is autowired by spring.
@@ -33,13 +33,13 @@ public class RequestItemServiceImpl implements RequestItemService {
@Autowired(required = true)
protected RequestItemDAO requestItemDAO;
protected RequestItemServiceImpl() {
protected RequestItemServiceImpl()
{
}
@Override
public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail,
String reqName, String reqMessage) throws SQLException {
public String createRequest(Context context, Bitstream bitstream, Item item, boolean allFiles, String reqEmail, String reqName, String reqMessage) throws SQLException {
RequestItem requestItem = requestItemDAO.create(context, new RequestItem());
requestItem.setToken(Utils.generateHexKey());
@@ -53,9 +53,10 @@ public class RequestItemServiceImpl implements RequestItemService {
requestItemDAO.save(context, requestItem);
if (log.isDebugEnabled()) {
if (log.isDebugEnabled())
{
log.debug("Created requestitem_token " + requestItem.getID()
+ " with token " + requestItem.getToken() + "\"");
+ " with token " + requestItem.getToken() + "\"");
}
return requestItem.getToken();
}

View File

@@ -17,19 +17,20 @@ import org.dspace.eperson.EPerson;
* Basic strategy that looks to the original submitter.
*
* @author Andrea Bollini
*
*/
public class RequestItemSubmitterStrategy implements RequestItemAuthorExtractor {
public RequestItemSubmitterStrategy() {
}
public RequestItemSubmitterStrategy() {
}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
RequestItemAuthor author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
return author;
}
@Override
public RequestItemAuthor getRequestItemAuthor(Context context, Item item)
throws SQLException {
EPerson submitter = item.getSubmitter();
RequestItemAuthor author = new RequestItemAuthor(
submitter.getFullName(), submitter.getEmail());
return author;
}
}

Some files were not shown because too many files have changed in this diff Show More