Compare commits

...

274 Commits

Author SHA1 Message Date
Mark H. Wood
e5cb62997a [maven-release-plugin] prepare release dspace-6.2 2017-09-07 16:14:03 -04:00
Mark H. Wood
ff7c7e3d6d Regenerate third-party license list 2017-09-07 15:16:09 -04:00
Mark H. Wood
068047cb11 Update LICENSE copyright claim with current year. 2017-09-07 15:08:17 -04:00
Terry Brady
0e7c7c0886 Merge pull request #1836 from Generalelektrix/dspace-6_x
DS-3687
2017-09-06 12:14:19 -07:00
Generalelektrix
6626901564 DS-3687
Making key generic for legacy note value since it is not only used in jspui.
2017-09-06 10:28:14 -04:00
Generalelektrix
6b8f072d3e DS-3687 Hard coded note not compatible with multi-lingual sites for legacy stats
Changed hard coded string for reference to a new field in language bundle.
2017-09-06 10:18:50 -04:00
Tim Donohue
fa587c52ed Merge pull request #1830 from tuub/DS-3680
DS-3680: Database changes of consumers aren't persisted anymore
2017-08-31 06:41:19 +10:00
Pascal-Nicolas Becker
d753c09b22 DS-3680: Remove problematic uncaching. Also see DS-3681 as follow-up. 2017-08-24 18:25:41 +02:00
Pascal-Nicolas Becker
fbb45ba758 DS-3680: clarify that we need to dispatch events before committing 2017-08-24 18:25:20 +02:00
Pascal-Nicolas Becker
014456e1ed Revert "Events must be dispatched after commit() to ensure they can retrieve latest data from DB"
This reverts commit 646936a3d8.
2017-08-24 18:22:58 +02:00
Terry Brady
258b4f00e9 [DS-3602] Ensure Consistent Use of Legacy Id in Usage Queries (#1782)
* ensure that owning Item,Coll,Comm use legacy consistently

* scopeId query

* refine queries

* alter id query

* Commenting the behavior of the id / legacyId search

* Address duplicate disp for DSO w legacy and uuid stats
2017-08-17 23:48:25 +10:00
Tim Donohue
3798a12778 Merge pull request #1824 from tdonohue/DS-3656_and_DS-3648
DS-3656 and DS-3648 : Fix several Hibernate caching / saving issues
2017-08-17 06:53:28 +10:00
Hardy Pottinger
bc82adef5e [DS-3674] copied over input-forms.xml to the test config folder 2017-08-15 14:43:41 -05:00
Tim Donohue
d4d61eed68 Replace dispatchEvents() call with an actual commit() to ensure changes are saved 2017-08-10 21:27:34 +00:00
Tim Donohue
646936a3d8 Events must be dispatched after commit() to ensure they can retrieve latest data from DB 2017-08-10 21:27:00 +00:00
Tim Donohue
9dd6bb0f08 DS-3648: Don't uncache submitter and related groups. Also DS-3656: Flush changes before evict() 2017-08-10 21:25:38 +00:00
Terry Brady
0e2ed31deb Merge pull request #1821 from Georgetown-University-Libraries/ds3661r6x
[DS-3661] Port to 6x: ImageMagick PDF Processing Degraded with Color Space Changes
2017-08-09 13:18:55 -07:00
Terry W Brady
1492dfef92 Normalize space 2017-08-09 13:02:04 -07:00
Terry W Brady
8b6c1acab1 Port PR1817, Only request image info if color space 2017-08-09 13:01:17 -07:00
Alan Orth
e88924b7da DS-3517 Allow improved handling of CMYK PDFs
Allow ImageMagick to generate thumbnails with more accurate colors
for PDFs using the CMYK color system. This adds two options to the
dspace.cfg where the user can optionally specify paths to CMYK and
RGB color profiles if they are available on their system (they are
provided by Ghostscript 9.x).

Uses im4java's Info class to determine the color system being used
by the PDF.

See: http://im4java.sourceforge.net/docs/dev-guide.html
2017-08-09 19:45:28 +00:00
Terry Brady
42608e028e Merge pull request #1816 from AlexanderS/fix-discovery-reindex
DS-3660: Fix discovery reindex on metadata change
2017-08-09 12:08:31 -07:00
Alexander Sulfrian
7e68165ded DS-3660: Fix discovery reindex on metadata change
Stored objects may get evicted from the session cache and get into detached
state. Lazy loaded fields are inaccessible and throw an exception on access.

Before using objects they have to be reloaded (retrieved from the
database and associated with the session again).
2017-08-03 16:25:39 +02:00
Tim Donohue
cfecf10e81 Merge pull request #1815 from tdonohue/DS-3659
DS-3659: Database migrate fails to create the initial groups
2017-08-03 23:51:47 +10:00
Alexander Sulfrian
5d656ea922 XMLUI: Remove doubled translation key (#1818)
The key "xmlui.ChoiceLookupTransformer.lookup" is already in line 2368 of the
same file.
2017-08-03 15:23:49 +02:00
Tim Donohue
62e2ac81fb Merge pull request #1814 from AlexanderS/fix/i18n-key-typo
XMLUI/SwordClient: Fix typo in i18n key
2017-08-02 07:33:05 +10:00
Tim Donohue
e9ace604a7 DS-3659: Ensure readonly connections can never rollback 2017-08-01 18:00:28 +00:00
Alexander Sulfrian
7f91528c1a XMLUI/SwordClient: Fix typo in i18n key 2017-07-25 15:21:10 +02:00
Tim Donohue
4881e9da20 [maven-release-plugin] prepare for next development iteration 2017-07-13 12:15:12 -05:00
Tim Donohue
eb4d56201a [maven-release-plugin] prepare release dspace-6.1 2017-07-13 12:15:02 -05:00
Tim Donohue
df9fb114ba Merge pull request #1807 from tdonohue/travis-fixes
Pin versions of SASS and Compass that Travis CI uses
2017-07-14 02:58:35 +10:00
Tim Donohue
f3556278aa Pin versions of SASS and Compass that Travis uses 2017-07-13 16:28:35 +00:00
Tim Donohue
f6af76c6d8 Revert 6.1 release 2017-07-13 14:15:21 +00:00
Tim Donohue
151a5f8fe2 [maven-release-plugin] prepare for next development iteration 2017-07-12 20:55:13 +00:00
Tim Donohue
57044f6698 [maven-release-plugin] prepare release dspace-6.1 2017-07-12 20:55:07 +00:00
Tim Donohue
4954f96f1d Merge pull request #1785 from atmire/DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url
DS-3127 Whitelist allowable formats google scholar citation pdf url
2017-07-12 06:40:45 +10:00
Tim Donohue
972f76e771 Merge pull request #1790 from tomdesair/DS-3632_Correct-update-handle-prefix-script
DS-3632: Correct update-handle-prefix script
2017-07-12 06:27:08 +10:00
Tim Donohue
e30b0cdec6 DS-3431 : Fix broken tests by removing nullifying of global eperson 2017-07-11 16:13:25 +00:00
Pascal-Nicolas Becker
a0f226b763 [DS-3431] Harden DSpace's BasicWorkflowService 2017-07-11 16:10:08 +00:00
Tim Donohue
bcf3110db9 Merge pull request #1723 from atmire/DS-2359
DS-2359 Error when depositing large files via browser (over 2Gb)
2017-07-08 05:56:33 +10:00
Tom Desair
c34b277c8d DS-3628: Check READ resource policies for items returned by REST find-by-metadata-field endpoint 2017-07-07 19:47:26 +00:00
Pascal-Nicolas Becker
6263444f79 DS-3619: AuthorizeService.getAuthorizedGroups(...) should check dates 2017-07-07 19:30:00 +00:00
Tim Donohue
9caff2caab Merge pull request #1799 from tdonohue/DS-3397-6x
[DS-3397] Fix error when getting bitstream policies in REST API (6.x version)
2017-07-07 02:47:42 +10:00
Tim Donohue
6151f4f594 Merge pull request #1798 from atmire/DS-3563-DSpace-6_Missing-index-metadatavalue-resource-type-id
DS-3563: Fix Oracle Flyway migration error
2017-07-06 01:34:38 +10:00
Tim Donohue
f953848a6d [DS-3397] Add null checks to EPerson and Group 2017-07-05 15:27:43 +00:00
Tom Desair
ccc1b1b784 DS-3563: Fix Oracle Flyway migration error 2017-07-05 14:02:29 +02:00
Tom Desair
1bb6369ad6 DS-3127: Update test assert descriptions of GoogleBitstreamComparatorTest 2017-07-04 16:07:57 +02:00
Tom Desair
e31daa0230 DS-3632: Prevent the use of the locate function as this seems to give inconsistent results 2017-06-30 17:13:31 +02:00
Tom Desair
762197b452 DS-3632: Changed the update-handle-prefix script so that it does not change the handle suffix 2017-06-30 16:58:15 +02:00
kshepherd
ecd0230943 Merge pull request #1780 from atmire/DS-3595-6x
DS-3595
2017-06-30 05:41:42 +10:00
Philip Vissenaekens
c9cad9083e Merge branch 'dspace-6_x' into DS-3595-6x 2017-06-29 15:38:20 +02:00
Tom Desair
b462e0ac6d Merge branch 'dspace-6_x' into DS-3127-DSpace-6_Whitelist-allowable-formats-Google-Scholar-citation_pdf_url 2017-06-29 09:55:42 +02:00
Terry Brady
65d638771f Merge pull request #1747 from AlexanderS/localization-input-forms-xmlui
DS-3598: Allow localization of input-forms.xml with XMLUI
2017-06-28 17:15:40 -07:00
Terry Brady
224df82087 Merge pull request #1752 from AlexanderS/fix/DS-3601-npe-feedback-page
DS-3601: Fix NPE when accessing feedback page without "Referer" header
2017-06-28 16:31:44 -07:00
Terry Brady
a6b3ce0d46 Merge pull request #1784 from rivaldi8/DS-3245-csv-linebreaks_ds6
DS-3245: CSV linebreaks not supported by Bulkedit -  DSpace 6
2017-06-28 15:47:39 -07:00
Terry Brady
2944279618 Merge pull request #1727 from tomdesair/DS-3579_Context-mode-and-cache-management-CLI-commands
DS-3579 Context mode and cache management for CLI commands
2017-06-28 14:49:11 -07:00
Tom Desair
fe115125d1 DS-3127: Prevent database updates when directly manipulating the bitstream list of a bundle 2017-06-28 17:46:58 +02:00
Tom Desair
6e9dec2c85 DS-3579: Make sure context.complete() can be called when in read-only 2017-06-28 16:15:30 +02:00
Terry Brady
fd298ae462 Merge pull request #1772 from tomdesair/DS-3571_Log-Hibernate-validation-errors
DS-3571 Log hibernate validation errors
2017-06-27 15:22:44 -07:00
Mark H. Wood
470c9b8f50 Merge pull request #1788 from mwoodiupui/DS-3568
[DS-3568] UTF-8 characters are now supported in configuration files
2017-06-26 13:34:04 -04:00
Terry Brady
33d3df72d6 Merge pull request #1732 from samuelcambien/DS-3584
DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address.
2017-06-23 16:56:27 -07:00
Christian Scheible
43cc3bd874 DS-3568. UTF-8 characters are now supported in configuration files 2017-06-22 16:35:30 -04:00
Tom Desair
3dc4909935 Fix IT tests 2017-06-22 17:07:55 +02:00
Tom Desair
71791c720f DS-3127: Process review feedback and fix tests 2017-06-22 15:01:45 +02:00
Àlex Magaz Graça
70a5124373 DS-3245: CSV linebreaks not supported by Bulkedit
When a multiline field contained empty lines, the importer stopped
reading the file. This reverts a change in 53d387fed to stop when the
end of the file has been reached instead.

Fixes https://jira.duraspace.org/browse/DS-3245
2017-06-22 13:57:06 +02:00
Philip Vissenaekens
7879ecdf14 DS-3595 2017-06-21 17:18:30 +02:00
Mark H. Wood
1db3261b54 Merge pull request #1696 from tomdesair/DS-2748_Improve-cocoon-page-not-found-page
DS-2748: Do not throw an exception in the PageNotFoundTransformer
2017-06-21 10:18:53 -04:00
Tom Desair
3732cafc4e Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:36:55 +02:00
Tom Desair
6f52d9700a Merge branch 'dspace-6_x' into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 17:18:22 +02:00
Tom Desair
769d3b590f DS-3579: Fix bug in metadata-import script 2017-06-19 14:59:00 +02:00
Tom Desair
7d04016436 Merge branch 'DS-3579_Context-mode-and-cache-management-CLI-commands' of https://github.com/tomdesair/DSpace into DS-3579_Context-mode-and-cache-management-CLI-commands 2017-06-19 14:38:28 +02:00
edusperoni
0084ae3833 DS-2291 Autocomplete not working on Mirage2 (#1741)
* fixing autocomplete problem listed on DS-2291. Also fixes the spinner that was being referenced in the wrong path.

* fix common lookup button (now consistent with the author lookup button)
2017-06-14 11:36:45 -05:00
Pascal-Nicolas Becker
fc1b22e59c Merge pull request #1767 from tomdesair/PR-1715
DS-3572: Check authorization for a specified user instead of currentUser
2017-06-13 16:08:33 +02:00
Tom Desair
9af33bc244 DS-3571: Make sure that any Hibernate schema validation error is logged instead of just a NullPointerException 2017-06-13 11:17:20 +02:00
Tom Desair
bd2d81d556 DS-3572: Renamed epersonInGroup to isEPersonInGroup 2017-06-12 15:17:59 +02:00
Tom Desair
f6eb13cf53 DS-3572: Restored behaviour of GroupService.isMember and moved new behaviour to GroupService.isParentOf 2017-06-12 15:05:59 +02:00
Tom Desair
b4a24fff7b DS-3572: Fix bug where normal group membership is ignored if special groups are present + added tests 2017-06-10 14:32:45 +02:00
Tom Desair
8bb7eb0fe5 Improve tests + make GroupService.isMember method more performant for special groups 2017-06-10 00:34:24 +02:00
Tom Desair
f48178ed41 Fix DSpace AIP IT tests: Set correct membership for admin 2017-06-09 20:09:15 +02:00
Tim Donohue
1b70e64f77 Merge pull request #1751 from tomdesair/DS-3406_Sort-Communities-and-Collections-Hibernate-Sort-Annotation
DS-3406: Sort communities and collections iteration 2
2017-06-09 09:35:00 -07:00
Tom Desair
b56bb4de3e Attempt to fix constraint violation 2017-06-09 17:51:27 +02:00
Tom Desair
139f01fffd Restore GroupServiceImpl.isMember logic + fix tests 2017-06-09 17:30:06 +02:00
frederic
257d75ca0c DS-3406 unit tests for getCollection/getCommunity for different dspace objects 2017-06-09 10:05:36 +02:00
frederic
5422a63f08 DS-3579 removed FETCH keyword and fixed typo in help message of harvest 2017-06-09 09:46:28 +02:00
Pascal-Nicolas Becker
853e6baff1 Merge pull request #1761 from tdonohue/DS-3604
DS-3604: Fix Bitstream reordering in JSPUI
2017-06-06 23:08:06 +02:00
Tim Donohue
205d8b9f92 Refactor BundleServiceImpl.setOrder() to be more failsafe. Update Tests to prove out (previously these new tests failed) 2017-06-06 14:07:16 +00:00
Pascal-Nicolas Becker
bb1e13a3b2 DS-3572: Adding simple unit test for DS-3572. 2017-06-06 15:54:13 +02:00
Pascal-Nicolas Becker
d2311663d3 DS-3572: Check authorization for a specified user instead of currentUser 2017-06-06 15:54:12 +02:00
kshepherd
7d1836bddc Merge pull request #1762 from Georgetown-University-Libraries/ds3563-6x
[DS-3563] Port PR to 6x
2017-06-06 12:36:46 +12:00
Tom Desair
36002b5829 DS-3563: Conditional create index for Oracle 2017-06-02 13:19:02 -07:00
Tom Desair
6392e195b9 DS-3563 Added missing index on metadatavalue.resource_type_id 2017-06-02 13:18:43 -07:00
Tim Donohue
d37d3a04ac Create a valid unit test for BundleServiceImpl.setOrder() method 2017-06-02 20:14:29 +00:00
Tim Donohue
ef3afe19eb DS-3604: Sync JSPUI bitstream reorder code with XMLUI code 2017-06-02 19:50:14 +00:00
Pascal-Nicolas Becker
81e171ec24 Merge pull request #1760 from tuub/DS-3582
DS-3582: Reintroduce calls to context.abort() at the end of some JSPs to free db resources.
2017-06-02 12:54:29 +02:00
Pascal-Nicolas Becker
4086e73e0b DS-3582: Any jsp that call UIUtil.obtainContext must free DB resources
Any jsp that calls UIUtil.obtainContext must either call context.abort
or context.commit to free the database connection and avoid exhausting
the database connection pool.
2017-06-01 17:37:30 +02:00
Tim Donohue
5f827ecbe8 Merge pull request #1759 from AlexanderS/rest-submissions-to-workflow
DS-3281: Start workflow for REST submissions
2017-05-31 13:52:42 -07:00
Alexander Sulfrian
30c4ca0fea DS-3281: Start workflow for REST submissions
If an item is submitted through the REST API (via POST on
/{collection_id}/items) the item should not be published immediately,
but should be approved via the defined workflow.
2017-05-31 18:27:44 +02:00
Terry Brady
094f775b6a Merge pull request #1746 from Georgetown-University-Libraries/ds3594
[DS-3594] Refine unit tests to run against postgres
2017-05-31 08:59:14 -07:00
Terry Brady
593cc085d2 Add comment for null check during sort 2017-05-23 10:23:16 -07:00
Tom Desair
f4cdfb4e65 Revert imports 2017-05-22 17:35:03 +02:00
Tom Desair
b4d8436672 DS-3406: Remove unnecessary commit 2017-05-22 17:17:03 +02:00
Tom Desair
271b6913ab Fix integration tests. Remove Hibernate Sort annotations as a collection name can change and this breaks the Set semantics 2017-05-22 15:06:44 +02:00
Alexander Sulfrian
137384c13f DS-3601: Fix NPE when accessing feedback page without "Referer" header 2017-05-22 12:24:31 +02:00
Tom Desair
72f8f9461b Fix bug so that comparator can be used for sets 2017-05-22 10:52:15 +02:00
Tom Desair
78effeac61 Fixing tests 2017-05-22 09:39:13 +02:00
Yana De Pauw
62c804f1e8 DS-3406: Ordering sub communities and collections 2017-05-22 09:39:12 +02:00
Tim Donohue
40b05ec773 Fix minor compilation error in cherry-pick of PR#1662 2017-05-18 21:03:35 +00:00
Miika Nurminen
a0e91cacd9 [DS-3463] Fix IP authentication for anonymous users
Added group membership check based on context even if no eperson is found. Affects file downloads in (at least) xmlui.
2017-05-18 20:12:34 +00:00
Alexander Sulfrian
90ca4deb35 Fix code style 2017-05-18 11:20:15 +02:00
Alexander Sulfrian
83002c3177 DS-3598: Allow localization of input-forms.xml with XMLUI
This allows separate input-forms.xml for the different locales with
XMLUI. The feature was already present in JSPUI.
2017-05-17 16:05:14 +02:00
Terry Brady
ebf256caa1 Avoid NPE 2017-05-15 14:37:59 -07:00
Terry Brady
1d655e97c9 Make destroy more forgiving of test failures 2017-05-15 14:31:41 -07:00
Terry Brady
d85a2d9153 Avoid handle collision in persistent db 2017-05-15 14:19:39 -07:00
Terry Brady
6f8a8b7f25 change parameter setting for db portability 2017-05-15 13:47:20 -07:00
Generalelektrix
3ea041d4dc DS-3164 Item statistic displays UUID of bitstreams instead of name (#1744)
simple change to return bit.getName() as opposed to return value
2017-05-10 17:16:50 -04:00
Tom Desair (Atmire)
6333fb6706 Ds 3552 read only context and hibernate improvements (#1694)
* Refactor READ ONLY mode in Context and adjust hibernate settings accordingly

* Set Context in READ-ONLY mode when retrieving community lists

* Fix Hibernate EHCache configuration + fix some Hibernate warnings

* Cache authorized actions and group membership when Context is in READ-ONLY mode

* Set default Context mode

* Let ConfigurableBrowse use a READ-ONLY context

* Add 2nd level cache support for Site and EPerson DSpaceObjects

* Added 2nd level caching for Community and Collection

* Fix tests and license checks

* Cache collection and community queries

* Small refactorings + backwards compatibility

* Set Context to READ-ONLY for JSPUI submissions and 'select collection' step

* OAI improvements part 1

* OAI indexing improvements part 1

* OAI indexing improvements part 2

* DS-3552: Only uncache resource policies in AuthorizeService when in read-only

* DS-3552: Additional comment on caching handles

* DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin

* DS-3552: Clear the read-only cache when switching Context modes

* DS-3552: Correct Group 2nd level cache size

* DS-3552: Always clear the cache, except when going from READ_ONLY to READ_ONLY
2017-05-04 14:12:06 -04:00
Hardy Pottinger
f62c32efe6 Merge pull request #1739 from edusperoni/handlebars-4
DS-3387 Upgrade handlebars to v4.
2017-05-04 12:28:15 -04:00
Hardy Pottinger
068be33265 Merge pull request #1707 from Frederic-Atmire/DS-3558
DS 3558 Case-insensitive bot matching option
2017-05-04 10:08:59 -04:00
Eduardo Speroni
3c25e04c08 upgrade grunt-contrib-handlebars to 1.0.0 2017-05-03 21:11:58 -03:00
Pascal-Nicolas Becker
a44b109f7a Merge pull request #1684 from tomdesair/DS-3406_Sort-Communities-and-Collections-with-comparator
DS-3406: Sort communities and collections in-memory using a comparator
2017-05-03 14:37:24 +02:00
frederic
a24b0078c2 Made service for SpringDetector and made SpringDetector delegate to it 2017-05-03 11:15:35 +02:00
Tom Desair
e358cb84d1 DS-3406: Resolve review feedback 2017-05-02 17:59:25 +02:00
frederic
0f51d5ad6a ported DS-3558 from dspace 5 to dspace6 2017-05-02 10:52:59 +02:00
frederic
454b0c9d6a Few tests to test case-(in)sensitive matching 2017-04-28 09:57:22 +02:00
frederic
6e1a5d1df9 made the necessary changes to easily test this class 2017-04-28 09:56:43 +02:00
frederic
b61c821e66 case-insensitive option commented out by default 2017-04-28 09:56:16 +02:00
frederic
fd76b587be wrote tests for botmatching 2017-04-27 14:24:07 +02:00
Eduardo Speroni
f12006fe21 Upgrade handlebars to v4.
Fixed advanced filters to work with handlebars v4. (https://github.com/wycats/handlebars.js/issues/1028)
2017-04-26 16:55:49 -03:00
Tim Donohue
3116c53d5e Merge pull request #1737 from cjuergen/DS-3585-6_x
Fix for DS3585
2017-04-26 11:09:14 -07:00
cjuergen
e2ffbaa3b8 Fix for DS3585 2017-04-26 15:49:28 +02:00
samuel
856e5ad388 DS-3584 when editing an eperson, trying to change its email address is ignored if another user already has that email address 2017-04-26 11:36:08 +02:00
Tom Desair
d2577fa16c DS-3579: Fix tests 2017-04-21 11:45:55 +02:00
Tom Desair
d5f9d9b0db DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-21 11:45:55 +02:00
Tom Desair
e4b26d64ce DS-3579: Improve cache usage harvest 2017-04-21 11:45:55 +02:00
Tom Desair
2dde39abe7 DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-21 11:45:55 +02:00
Tom Desair
a715ae4d15 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:55 +02:00
Tom Desair
e63b3f4c13 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-21 11:45:54 +02:00
Tom Desair
acedcacdb3 DS-3579: Improve cache usage update-handle-prefix 2017-04-21 11:45:54 +02:00
Tom Desair
37219a986d DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-21 11:45:54 +02:00
Tom Desair
a3fc30ad94 DS-3579: Fix tests 2017-04-20 21:55:28 +02:00
Terry Brady
e2862b3058 Merge pull request #1714 from tuub/DS-3575
DS-3575: Rename misguiding find method in ResourcePolicyService
2017-04-20 11:47:20 -07:00
Mark H. Wood
8442e6f395 Merge pull request #1717 from mwoodiupui/DS-3564
[DS-3564] Limit maximum idle database connections by default
2017-04-20 12:39:11 -04:00
Tom Desair
7e1a0a1a0c DS-3552: Fix cache leakage in SolrServiceResourceRestrictionPlugin 2017-04-20 17:40:24 +02:00
Tom Desair
a5d414c0b2 DS-3552: Additional comment on caching handles 2017-04-20 17:36:10 +02:00
Tom Desair
cabb4fab66 DS-3579: Improve cache usage rdfizer, sub-daily, doi organiser 2017-04-20 17:33:07 +02:00
Tom Desair
5c19bb52e0 DS-3579: Improve cache usage harvest 2017-04-20 17:32:26 +02:00
Tom Desair
1e62dfdbbc DS-3579: Improve cache usage bitstore-migrate, cleanup, curate, embargo-lifter 2017-04-20 17:31:49 +02:00
Tom Desair
867ab6c9b9 DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:37 +02:00
Tom Desair
392dd2653a DS-3579: Improve cache usage export, import, itemupdate, metadata-export, packager 2017-04-20 17:30:07 +02:00
Tom Desair
6f3546f844 DS-3579: Improve cache usage update-handle-prefix 2017-04-20 17:28:28 +02:00
Tim Donohue
9a0d293abf Merge pull request #1720 from Georgetown-University-Libraries/ds3516-6x
[DS-3516] 6x Port ImageMagick PDF Thumbnail class should only process PDFs
2017-04-20 06:56:08 -07:00
Philip Vissenaekens
782a963916 DS-2359 2017-04-20 13:10:39 +02:00
Tom Desair
0235ba391f DS-3579: checker, checker-emailer, filter-media, generate-sitemaps, index-authority 2017-04-20 10:41:51 +02:00
Alan Orth
eae5a96179 port PR1709 to 6x 2017-04-19 14:44:28 -07:00
Mark H. Wood
1ef1170159 [DS-3564] Limit maximum idle database connections by default 2017-04-19 14:56:44 -04:00
Tim Donohue
4f7410232a Merge pull request #1682 from tuub/DS-3535
[DS-3535] Reduced error logging by interrupted download
2017-04-19 09:45:05 -07:00
Tim Donohue
6c29cd61b6 Merge pull request #1699 from enrique/patch-1
DS-3554: Check for empty title in Submissions
2017-04-19 09:32:06 -07:00
Tim Donohue
f6a651d4df Merge pull request #1703 from samuelcambien/DS-3553
DS-3553: when creating a new version, do context complete before redirecting to the submission page
2017-04-19 09:27:14 -07:00
Tim Donohue
c57b443611 Merge pull request #1713 from atmire/DS-3573-Filtername-in-XMLUI-Discovery-filter-labels-dspace6
DS-3573: Filtername in XMLUI Discovery filter labels
2017-04-19 09:19:54 -07:00
Pascal-Nicolas Becker
a5bdff0803 DS-3575: Rename misguiding find method in ResourcePolicyService 2017-04-18 18:12:32 +02:00
samuel
e3f72b280d DS-3553: when creating a new version, do context complete before redirecting to the submission page 2017-04-18 11:01:47 +02:00
Yana De Pauw
63ed4cc1e0 DS-3573: Filtername in XMLUI Discovery filter labels 2017-04-14 15:26:08 +02:00
Tom Desair
f0a5e7d380 DS-3552: Only uncache resource policies in AuthorizeService when in read-only 2017-04-14 09:26:08 +02:00
Tom Desair
1e64850af2 OAI indexing improvements part 2 2017-04-14 00:40:19 +02:00
Tom Desair
d9db5a66ca OAI indexing improvements part 1 2017-04-14 00:21:03 +02:00
Tom Desair
5f77bd441a OAI improvements part 1 2017-04-13 17:44:21 +02:00
frederic
4b87935cbb DS-3558 removed duplicate code and changed default option 2017-04-13 16:27:19 +02:00
Tim Donohue
3db74c7ba3 Merge pull request #1671 from mwoodiupui/DS-3505
[DS-3505] Bad redirection from logout action
2017-04-12 13:37:17 -07:00
frederic
f000b280c1 DS-3558 added comments on code 2017-04-12 15:04:57 +02:00
frederic
cad79dc6c9 DS-3558 made case insensitive botsearch configurable and optimized case insensitive pattern matching 2017-04-12 14:29:58 +02:00
Enrique Martínez Zúñiga
794600b96e Fix for DS-3554
Use StringUtils.isNotBlank instead of only checking title.length
2017-04-05 09:31:20 -05:00
Tom Desair
044ba1acd3 DS-2748: Do not throw an exception in the PageNotFoundTransformer but do return a 404 error code 2017-04-05 15:45:32 +02:00
Tom Desair
f54fe5c12e Set Context to READ-ONLY for JSPUI submissions and 'select collection' step 2017-04-05 15:23:16 +02:00
Tom Desair
1e917ed845 Small refactorings + backwards compatibility 2017-04-05 11:02:58 +02:00
Tom Desair
7719848d47 Cache collection and community queries 2017-04-05 09:59:31 +02:00
Tom Desair
f0e9e04a3a Fix tests and license checks 2017-04-04 13:44:38 +02:00
Tom Desair
5f194334ff Added 2nd level caching for Community and Collection 2017-04-04 13:16:13 +02:00
Tom Desair
7371a7c71d Add 2nd level cache support for Site and EPerson DSpaceObjects 2017-04-03 16:21:14 +02:00
Tom Desair
3963c95f6e Let ConfigurableBrowse use a READ-ONLY context 2017-04-03 15:59:13 +02:00
Tom Desair
75497f5107 Set default Context mode 2017-04-03 15:54:18 +02:00
Tom Desair
852c4d3b62 Cache authorized actions and group membership when Context is in READ-ONLY mode 2017-04-03 15:26:29 +02:00
Tom Desair
d108464a3a Fix Hibernate EHCache configuration + fix some Hibernate warnings 2017-04-03 15:26:29 +02:00
Tom Desair
dbfc8ce9a7 Set Context in READ-ONLY mode when retrieving community lists 2017-04-03 15:26:28 +02:00
Tom Desair
eee4923518 Refactor READ ONLY mode in Context and adjust hibernate settings accordingly 2017-04-03 15:26:28 +02:00
Toni Prieto
9ef505498b [DS-2947] DIM crosswalks repeats authority & confidence values in the metadata values 2017-03-24 16:16:31 +00:00
Tom Desair
3540fe5ec6 DS-3406: Sort communities and collections in-memory using a comparator 2017-03-23 15:27:02 +01:00
Tim Donohue
57f2a10da1 Merge pull request #1663 from mwoodiupui/DS-1140
[DS-1140] Update MSWord Media Filter to use Apache POI (like PPT Filter) and also support .docx
2017-03-22 10:31:35 -05:00
Per Broman
1e33e27a84 [DS-3535] Reduced error logging by interrupted download 2017-03-21 10:29:06 +01:00
Pascal-Nicolas Becker
a54bf11b8c Merge pull request #1673 from tuub/DS-3523
[DS-3523] Bugfix for search with embargoed thumbnails
2017-03-09 12:38:58 +01:00
Per Broman
0601e9f061 [DS-3523] Bugfix for search with embargoed thumbnails 2017-03-09 12:07:52 +01:00
Mark H. Wood
b578abd054 [DS-3505] On logout redirect to dspace.url, not context path. 2017-03-08 15:51:01 -05:00
Terry Brady
bc8629b145 [DS-3348] Drop date check in EmbargoService (#1542)
* Drop date check in EmbargoService

* Revise comment per review
2017-03-08 18:29:12 +00:00
Peter Dietz
26859b1133 DS-3366 Fix handleresolver by removing out.close (#1560) 2017-03-08 18:25:38 +00:00
Andrea Schweer
97785d778f [DS-3336] Properly sort collections in move item drop-down 2017-03-08 18:08:30 +00:00
Terry Brady
f1c3a9d919 fix typo in comment 2017-03-08 17:44:30 +00:00
Terry Brady
6442c979aa First attempt to resort submitters 2017-03-08 17:44:12 +00:00
Tim Donohue
a36f5b1f48 Merge pull request #1670 from tuub/DS-3521
[DS-3521] Bugfix browsing embargoed thumbnail
2017-03-08 09:51:56 -06:00
Per Broman
36a87c2107 [DS-3521] Bugfix browsing embargoed thumbnail 2017-03-07 12:09:28 +01:00
Mark H. Wood
43d7cd564c [DS-1140] Add configuration data 2017-03-02 15:49:34 -05:00
Mark H. Wood
9d8738c934 [DS-1140] Add unit test. 2017-03-02 14:50:14 -05:00
Mark H. Wood
c09edc5a15 [DS-1140] No need to treat old and new Word formats differently 2017-03-02 14:49:24 -05:00
Tim Donohue
2d95c7a2a1 Merge pull request #1652 from Georgetown-University-Libraries/ds3282-6x
[DS-3282] 6x Fix js error for filters with dashes
2017-03-01 14:59:47 -06:00
Terry Brady
d2c43b8aa5 Merge pull request #1654 from Georgetown-University-Libraries/ds2789-6_x
[DS-2789] 6x Display a "restricted image" for a thumbnail if the bitstream is restricted
2017-03-01 12:53:44 -08:00
Terry Brady
5d9dd4d4e3 Merge pull request #1660 from Georgetown-University-Libraries/ds3283-6x2
[DS-3283] 6x Mirage2: Edit Collection Source - No Field Label for Set Id
2017-03-01 12:42:38 -08:00
Mark H. Wood
24c1f5367c [DS-1140] New POI-based MS Word extractor and some comment cleanup 2017-02-28 17:12:23 -05:00
Hardy Pottinger
fbaf950388 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-28 16:10:08 -06:00
Hardy Pottinger
ddedfa2a14 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-28 16:07:58 -06:00
Terry W Brady
2b96f9472b Add default lock icon for Mirage theme 2017-02-27 14:10:02 -08:00
Terry W Brady
1af23f2d8b reapply pr from master 2017-02-27 14:10:02 -08:00
Terry W Brady
a868a4bc9b Re-applying changes 2017-02-27 13:45:53 -08:00
Tim Donohue
2734dca1cd Merge pull request #1659 from tdonohue/fix_travis_timeouts
Fix Travis CI Maven download timeouts
2017-02-27 15:36:07 -06:00
Tim Donohue
8c70f9bc8c Workaround for travis-ci/travis-ci#4629 2017-02-27 21:21:08 +00:00
Tom Desair
8d56e828a2 DS-3367: Fix authorization error when non-admin users claim a configurable workflow task 2017-02-23 16:28:37 -05:00
Mark H. Wood
0e8c95a196 Merge pull request #1651 from mwoodiupui/DS-3378
[DS-3378] Patch to restore lost indices, from Adan Roman
2017-02-23 16:06:08 -05:00
Terry Brady
cf190c78e8 Fix js error for filters with dashes 2017-02-23 09:40:10 -08:00
Mark H. Wood
2d1c59ac49 [DS-3378] Patch to restore lost indices, from Adan Roman 2017-02-22 17:24:46 -05:00
Tom Desair
3a03e7a9d3 DS-2952: Added missing license 2017-02-22 20:26:42 +00:00
Tom Desair
757264c1f6 DS-2952: Only prepend new line if we have an actual input stream 2017-02-22 20:26:33 +00:00
Tom Desair
dfe6d79da4 DS-2952: Small improvements to FullTextContentStreams and added a unit test for it 2017-02-22 20:26:23 +00:00
Tom Desair
708fe215b0 DS-2952: Use a SequenceInputStream to add the content of multiple full text bitstreams to SOLR 2017-02-22 20:26:09 +00:00
Hardy Pottinger
a51ad3c6eb Merge pull request #1614 from jonas-atmire/DS-3448-MultiSelect-in-Submission
DS-3448 Multi-select in submission for workflow and workspace items
2017-02-22 12:13:12 -06:00
Hardy Pottinger
c5aebee9cc Merge pull request #1649 from hardyoyo/DS-3501-fix-XML-validation-by-excluding-failing-node-packages
[DS-3501] adjust XML validation
2017-02-22 11:17:38 -06:00
Hardy Pottinger
8a06522fa9 [DS-3501] adjust XML validation to skip contents of any folder that includes the text node/node_modules 2017-02-22 16:41:35 +00:00
samuel
267518ebaf DS 3425 outputstream gets closed in JSONDiscoverySearcher 2017-02-21 21:34:29 +00:00
samuel
2685cd793e DS-3415 - administrative.js doEditCommunity wrong parameter name 2017-02-21 21:03:19 +00:00
Tim Donohue
36c7fa9c1a Merge pull request #1588 from atmire/DS-3419-6_x
DS-3419
2017-02-21 14:55:56 -06:00
Bram Luyten
54c5c2932b DS-2840 sidebar facet logging from INFO to DEBUG
Changes INFO level sidebar facet transformer log entries to DEBUG
2017-02-18 14:20:08 +01:00
Luigi Andrea Pascarelli
7225f2597a DS-3356 add turnoff authz system 2017-02-15 22:10:18 +00:00
Mark H. Wood
59632413c2 [DS-3469] virus scan during submission attempts to read uploaded bitstream as anonymous user, which fails (#1632)
* [DS-3469] Add the current session context to the curation task run.

* [DS-3469] Log how I/O failed, not just that it did.

* [DS-3469] Keep reference to Bundle from which we just removed the Bitstream instead of expecting the List of Bundle to be unaltered.

* [DS-3469] Finish switching from e.getMessage() to e

* [DS-3469] Note the side effect of calling curate() with a Context.
2017-02-08 10:32:29 -06:00
Tim Donohue
7650af1e69 Merge pull request #1639 from rradillen/DS-3473
DS-3473: add guard code in case no dot is present in bitstream name
2017-02-08 10:24:28 -06:00
Tim Donohue
e4659832a0 Merge pull request #1641 from cjuergen/DS-3479-6_x
Fix for DS-3479 preventing the import of empty metadata
2017-02-08 10:15:26 -06:00
Tim Donohue
ab982e4f0b Merge pull request #1613 from tomdesair/DS-3436-Sharding-corrupts-multivalued-fields
DS-3436 Sharding SOLR cores corrupts multivalued fields
2017-02-08 09:47:22 -06:00
Terry Brady
8d76aa2010 [DS-3456] 6x Fix Command Line Parameters for statistics import/export tools (#1624)
* Clarify command line args

* support flexible import/export of stats

* Fix DS-3464 solr-reindex-statistics for shard

* Preserve multi val fields on import/export

* Time zone consistency in shard name creation

* Migrate PR feedback from 5x to 6x

* whitespace
2017-02-08 09:43:03 -06:00
Tim Donohue
9eb7c6734c Merge pull request #1633 from Georgetown-University-Libraries/ds3457b
[DS-3457] Address tomcat hang when multiple solr shards exist in DSpace 6
2017-02-08 09:30:42 -06:00
cjuergen
99c1af8688 Fix for DS-3479 preventing the import of empty metadata 2017-02-06 15:11:14 +01:00
Roeland Dillen
866bfe8fd8 add guard code in case no dot is present in bitstream name 2017-02-05 13:45:40 +01:00
Terry Brady
12de02c7f3 Merge pull request #1637 from kshepherd/DS-3477
[DS-3477] fix altmetrics config lookups in item-view.xsl (6.x)
2017-02-02 09:56:12 -08:00
Kim Shepherd
0c0b280d05 [DS-3477] fix altmetrics config lookups in item-view.xsl 2017-02-02 18:04:36 +13:00
Hardy Pottinger
bf1979fd41 [DS-3475] adding more guidance to example local.cfg as per suggestion of Tim Donohue 2017-02-01 15:49:19 -06:00
Hardy Pottinger
e32b93bae3 [DS-3475] added back assetstore.dir configuration to dspace.cfg 2017-02-01 15:48:51 -06:00
kshepherd
f86fff9063 Merge pull request #1611 from tomdesair/DS-3446-DSpace-6x_Non-admin-submitter-cannot-remove-bitstream
DS-3446: On bitstream delete, remove policies only after the bitstream has been updated
2017-02-02 09:42:33 +13:00
Terry W Brady
f7cadf8774 Initialize solr shards at first stats post
Make it more likely that the shards are awake on first use
2017-01-31 15:02:55 -08:00
Terry W Brady
4f7520d532 Additional comments 2017-01-30 17:05:04 -08:00
Terry W Brady
9904fdb412 DS-3457 and DS-3458 fixes 2017-01-30 12:11:06 -08:00
Terry Brady
e0e223e2bf [DS-3468] 6x Ignore bin directory built by Eclipse (#1627)
* Exclude top level /bin directory built by Eclipse
2017-01-26 16:28:25 +01:00
Hardy Pottinger
45762e993d Merge pull request #1617 from jonas-atmire/DS-3445-ChecksumChecker-no-enum-constant-error
DS-3445 Only add "ResultCode" if not default
2017-01-19 10:15:11 -06:00
Andrew Bennet
ce72010805 [DS-3460] Fix incorrect REST documentation 2017-01-17 21:32:40 +01:00
Bram Luyten
faa12bfd33 Merge pull request #1610 from tomdesair/DS-3108-DSpace-6x_Support-non-email-based-authentication-in-REST-API
DS-3108 DSpace 6x: Support non-email based authentication in REST API
2017-01-14 11:44:35 +01:00
Jonas Van Goolen
2805386f9d DS-3445 Only add "ResultCode" if not default 2017-01-13 10:41:30 +01:00
Jonas Van Goolen
a62eddeb59 DS-3448 Removal of unnecessary duplicate javascript file 2017-01-13 09:43:43 +01:00
Jonas Van Goolen
c873e554d3 DS-3448 Multi-select in submission for workflow and workspace items -> License headers in new files 2017-01-12 13:52:21 +01:00
Jonas Van Goolen
01dee698c2 DS-3448 Multi-select in submission for workflow and workspace items 2017-01-11 15:33:25 +01:00
Tom Desair
eb5dc58384 DS-3436: Tell SOLR to split values of multi-valued fields when sharding cores 2017-01-11 12:55:10 +01:00
Tim Donohue
958631c81c Merge pull request #1600 from samuelcambien/dspace-6_x-DS-3435
DS-3435 possible nullpointerexception at AccessStepUtil$populateEmbar…
2017-01-10 09:04:35 -06:00
Tom Desair
89ded55942 DS-3108 DSpace 6 only: Revert rename REST API login parameter email to user 2017-01-10 14:04:01 +01:00
Tom Desair
9855022228 Revert "DS-3108: Rename REST API login parameter email to user"
This reverts commit d2c4233d9e.
2017-01-10 13:57:29 +01:00
Tom Desair
bfc68d3354 DS-3446: Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights) 2017-01-09 22:53:52 +01:00
Tom Desair
38848e16d3 DS-3108: Update REST API authentication documentation
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:33:58 +01:00
Tom Desair
0244a425ae DS-3108: Remove deprication since there is no alternative 2017-01-09 17:32:55 +01:00
Tom Desair
c3c5287880 DS-3108: Remove unused imports 2017-01-09 17:32:49 +01:00
Tom Desair
3321cba560 DS-3108: Remove unnecessary /login-shibboleth endpoint
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:32:45 +01:00
Tom Desair
684e87ed20 DS-3108: Return FORBIDDEN error code when authentication on the REST API failed
Conflicts:
	dspace-rest/src/main/java/org/dspace/rest/RestIndex.java
2017-01-09 17:31:24 +01:00
Tom Desair
d2c4233d9e DS-3108: Rename REST API login parameter email to user 2017-01-09 17:30:38 +01:00
Tom Desair
ae9862395a DS-3108: Support authentication mechanisms where the e-mail attribute is not an e-mail address 2017-01-09 17:30:26 +01:00
Tim Donohue
6256c673b9 Merge pull request #1607 from bram-atmire/DS-3289
DS-3289 Removing double slashes in image paths
2017-01-09 09:17:23 -06:00
Bram Luyten
2b0448fe64 DS-3289 Removing double slashes in image paths 2017-01-07 18:22:03 +01:00
cjuergen
1e4ae0b5e3 Cherry pick DS-3440 solution d95902b 2017-01-06 19:09:44 +01:00
Bram Luyten
1f36899abe Merge pull request #1605 from 4Science/DS-3441-6x
DS-3441 READ permission on the Collection object not respected by the JSPUI (6_x)
2017-01-06 18:18:50 +01:00
Andrea Bollini
a6aa9816d2 DS-3441 READ permission on the Collection object not respected by the JSPUI 2017-01-06 13:56:47 +01:00
Bram Luyten
242d1357c7 Merge pull request #1601 from tomdesair/DS-3381_Workspace-item-not-saved-when-using-versioning
DS-3381 workspace item not saved when using versioning
2017-01-05 16:43:50 +01:00
Tom Desair
4b927562b6 DS-3381: Do an explicit commit so that the workspace item is written to the database before the redirect to the submission form (see versioning.js doCreateNewVersion) 2017-01-04 23:05:20 +01:00
samuel
7b6ea8e807 DS-3435 possible nullpointerexception at AccessStepUtil$populateEmbargoDetail
Conflicts:
	dspace-xmlui/src/main/java/org/dspace/app/xmlui/aspect/submission/submit/AccessStepUtil.java
2017-01-03 12:40:56 +01:00
Philip Vissenaekens
a3c6aa2ced DS-3419 2016-12-09 13:14:55 +01:00
Ivan Masár
50eed239f5 DS-3363 CSV import error says "row", means "column" 2016-11-14 18:28:11 +01:00
Ivan Masár
3065389435 typo: xforwarderfor -> xforwardedfor 2016-11-01 16:18:45 +01:00
278 changed files with 13241 additions and 3507 deletions

1
.gitignore vendored
View File

@@ -6,6 +6,7 @@ tags
## Ignore project files created by Eclipse
.settings/
/bin/
.project
.classpath

View File

@@ -7,6 +7,8 @@ env:
# Install prerequisites for building Mirage2 more rapidly
before_install:
# Remove outdated settings.xml from Travis builds. Workaround for https://github.com/travis-ci/travis-ci/issues/4629
- rm ~/.m2/settings.xml
# Install Node.js 6.5.0 & print version info
- nvm install 6.5.0
- node --version
@@ -21,10 +23,10 @@ before_install:
# Print ruby version info (should be installed)
- ruby -v
# Install Sass & print version info
- gem install sass
- gem install sass -v 3.3.14
- sass -v
# Install Compass & print version info
- gem install compass
- gem install compass -v 1.0.1
- compass version
# Skip install stage, as we'll do it below

View File

@@ -1,7 +1,7 @@
DSpace source code license:
Copyright (c) 2002-2016, DuraSpace. All rights reserved.
Copyright (c) 2002-2017, DuraSpace. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are

View File

@@ -280,25 +280,25 @@ https://wiki.duraspace.org/display/DSPACE/Code+Contribution+Guidelines
* Morfologik Stemming Dictionary for Polish (org.carrot2:morfologik-polish:1.7.1 - http://morfologik.blogspot.com/morfologik-polish/)
* Morfologik Stemming APIs (org.carrot2:morfologik-stemming:1.7.1 - http://morfologik.blogspot.com/morfologik-stemming/)
* Stax2 API (org.codehaus.woodstox:stax2-api:3.1.1 - http://woodstox.codehaus.org/StAX2)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.3 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.0-rc4-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.3 - https://github.com/dspace/dspace-xmlui-lang)
* DSpace Kernel :: API and Implementation (org.dspace:dspace-api:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-api)
* DSpace I18N :: Language Packs (org.dspace:dspace-api-lang:6.0.5 - https://github.com/dspace/dspace-api-lang)
* DSpace JSP-UI (org.dspace:dspace-jspui:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-jspui)
* DSpace OAI-PMH (org.dspace:dspace-oai:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-oai)
* DSpace RDF (org.dspace:dspace-rdf:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-rdf)
* DSpace REST :: API and Implementation (org.dspace:dspace-rest:6.2-SNAPSHOT - http://demo.dspace.org)
* DSpace Services Framework :: API and Implementation (org.dspace:dspace-services:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-services)
* Apache Solr Webapp (org.dspace:dspace-solr:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-solr)
* DSpace SWORD (org.dspace:dspace-sword:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-sword)
* DSpace SWORD v2 (org.dspace:dspace-swordv2:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-swordv2)
* DSpace XML-UI (Manakin) (org.dspace:dspace-xmlui:6.2-SNAPSHOT - https://github.com/dspace/DSpace/dspace-xmlui)
* DSpace XML-UI (Manakin) I18N :: Language Packs (org.dspace:dspace-xmlui-lang:6.0.6 - https://github.com/dspace/dspace-xmlui-lang)
* handle (org.dspace:handle:6.2 - no url defined)
* jargon (org.dspace:jargon:1.4.25 - no url defined)
* mets (org.dspace:mets:1.5.2 - no url defined)
* oclc-harvester2 (org.dspace:oclc-harvester2:0.1.12 - no url defined)
* XOAI : OAI-PMH Java Toolkit (org.dspace:xoai:3.2.10 - http://nexus.sonatype.org/oss-repository-hosting.html/xoai)
* Repackaged Cocoon Servlet Service Implementation (org.dspace.dependencies.cocoon:dspace-cocoon-servlet-service-impl:1.0.3 - http://projects.dspace.org/dspace-pom/dspace-cocoon-servlet-service-impl)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.0-rc4-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* DSpace Kernel :: Additions and Local Customizations (org.dspace.modules:additions:6.2-SNAPSHOT - https://github.com/dspace/DSpace/modules/additions)
* Hamcrest All (org.hamcrest:hamcrest-all:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-all)
* Hamcrest Core (org.hamcrest:hamcrest-core:1.3 - https://github.com/hamcrest/JavaHamcrest/hamcrest-core)
* JBibTeX (org.jbibtex:jbibtex:1.0.10 - http://www.jbibtex.org)

View File

@@ -12,7 +12,7 @@
<parent>
<groupId>org.dspace</groupId>
<artifactId>dspace-parent</artifactId>
<version>6.1-SNAPSHOT</version>
<version>6.2</version>
<relativePath>..</relativePath>
</parent>
@@ -266,6 +266,9 @@
<include>**/*.xsl</include>
<include>**/*.xmap</include>
</includes>
<excludes>
<exclude>**/node/node_modules/**</exclude>
</excludes>
</validationSet>
</validationSets>
</configuration>
@@ -505,6 +508,11 @@
<artifactId>contiperf</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rometools</groupId>
<artifactId>rome-modules</artifactId>

View File

@@ -196,7 +196,7 @@ public class DSpaceCSV implements Serializable
StringBuilder lineBuilder = new StringBuilder();
String lineRead;
while (StringUtils.isNotBlank(lineRead = input.readLine()))
while ((lineRead = input.readLine()) != null)
{
if (lineBuilder.length() > 0) {
// Already have a previously read value - add this line

View File

@@ -34,6 +34,8 @@ public class MetadataExport
protected ItemService itemService;
protected Context context;
/** Whether to export all metadata, or just normally edited metadata */
protected boolean exportAll;
@@ -55,6 +57,7 @@ public class MetadataExport
// Store the export settings
this.toExport = toExport;
this.exportAll = exportAll;
this.context = c;
}
/**
@@ -73,6 +76,7 @@ public class MetadataExport
// Try to export the community
this.toExport = buildFromCommunity(c, toExport, 0);
this.exportAll = exportAll;
this.context = c;
}
catch (SQLException sqle)
{
@@ -144,13 +148,19 @@ public class MetadataExport
{
try
{
Context.Mode originalMode = context.getCurrentMode();
context.setMode(Context.Mode.READ_ONLY);
// Process each item
DSpaceCSV csv = new DSpaceCSV(exportAll);
while (toExport.hasNext())
{
csv.addItem(toExport.next());
Item item = toExport.next();
csv.addItem(item);
context.uncacheEntity(item);
}
context.setMode(originalMode);
// Return the results
return csv;
}
@@ -224,7 +234,7 @@ public class MetadataExport
String filename = line.getOptionValue('f');
// Create a context
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
// The things we'll export

View File

@@ -31,6 +31,7 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.dspace.workflow.factory.WorkflowServiceFactory;
@@ -122,6 +123,9 @@ public class MetadataImport
// Make the changes
try
{
Context.Mode originalMode = c.getCurrentMode();
c.setMode(Context.Mode.BATCH_EDIT);
// Process each change
for (DSpaceCSVLine line : toImport)
{
@@ -134,11 +138,15 @@ public class MetadataImport
throw new MetadataImportException("'action' not allowed for new items!");
}
WorkspaceItem wsItem = null;
WorkflowItem wfItem = null;
Item item = null;
// Is this a new item?
if (id != null)
{
// Get the item
Item item = itemService.find(c, id);
item = itemService.find(c, id);
if (item == null)
{
throw new MetadataImportException("Unknown item ID " + id);
@@ -345,8 +353,8 @@ public class MetadataImport
// Create the item
String collectionHandle = line.get("collection").get(0);
collection = (Collection) handleService.resolveToObject(c, collectionHandle);
WorkspaceItem wsItem = workspaceItemService.create(c, collection, useTemplate);
Item item = wsItem.getItem();
wsItem = workspaceItemService.create(c, collection, useTemplate);
item = wsItem.getItem();
// Add the metadata to the item
for (BulkEditMetadataValue dcv : whatHasChanged.getAdds())
@@ -364,9 +372,9 @@ public class MetadataImport
if(useWorkflow){
WorkflowService workflowService = WorkflowServiceFactory.getInstance().getWorkflowService();
if (workflowNotify) {
workflowService.start(c, wsItem);
wfItem = workflowService.start(c, wsItem);
} else {
workflowService.startWithoutNotify(c, wsItem);
wfItem = workflowService.startWithoutNotify(c, wsItem);
}
}
else
@@ -394,7 +402,16 @@ public class MetadataImport
// Record the changes
changes.add(whatHasChanged);
}
if (change) {
//only clear cache if changes have been made.
c.uncacheEntity(wsItem);
c.uncacheEntity(wfItem);
c.uncacheEntity(item);
}
}
c.setMode(originalMode);
}
catch (MetadataImportException mie)
{

View File

@@ -91,16 +91,16 @@ public class MetadataImportInvalidHeadingException extends Exception
{
if (type == SCHEMA)
{
return "Unknown metadata schema in row " + column + ": " + badHeading;
return "Unknown metadata schema in column " + column + ": " + badHeading;
} else if (type == ELEMENT)
{
return "Unknown metadata element in row " + column + ": " + badHeading;
return "Unknown metadata element in column " + column + ": " + badHeading;
} else if (type == MISSING)
{
return "Row with missing header: Row " + column;
return "Row with missing header: column " + column;
} else
{
return "Bad metadata declaration in row " + column + ": " + badHeading;
return "Bad metadata declaration in column" + column + ": " + badHeading;
}
}
}
}

View File

@@ -7,36 +7,32 @@
*/
package org.dspace.app.harvest;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.ItemService;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.harvest.HarvestedCollection;
import org.dspace.content.Item;
import org.dspace.harvest.HarvestingException;
import org.dspace.harvest.OAIHarvester;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.harvest.factory.HarvestServiceFactory;
import org.dspace.harvest.service.HarvestedCollectionService;
/**
* Test class for harvested collections.
*
@@ -91,7 +87,7 @@ public class Harvest
HelpFormatter myhelp = new HelpFormatter();
myhelp.printHelp("Harvest\n", options);
System.out
.println("\nPING OAI server: Harvest -g -s oai_source -i oai_set_id");
.println("\nPING OAI server: Harvest -g -a oai_source -i oai_set_id");
System.out
.println("RUNONCE harvest with arbitrary options: Harvest -o -e eperson -c collection -t harvest_type -a oai_source -i oai_set_id -m metadata_format");
System.out
@@ -162,7 +158,7 @@ public class Harvest
// Instantiate our class
Harvest harvester = new Harvest();
harvester.context = new Context();
harvester.context = new Context(Context.Mode.BATCH_EDIT);
// Check our options
@@ -375,6 +371,8 @@ public class Harvest
Item item = it.next();
System.out.println("Deleting: " + item.getHandle());
collectionService.removeItem(context, collection, item);
context.uncacheEntity(item);
// Dispatch events every 50 items
if (i%50 == 0) {
context.dispatchEvents();

View File

@@ -176,7 +176,7 @@ public class ItemExportCLITool {
System.exit(1);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
c.turnOffAuthorisationSystem();
if (myType == Constants.ITEM)

View File

@@ -7,44 +7,28 @@
*/
package org.dspace.app.itemexport;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import javax.mail.MessagingException;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.app.itemexport.service.ItemExportService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.BitstreamService;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.Utils;
import org.dspace.core.Email;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.service.EPersonService;
import org.dspace.handle.service.HandleService;
import org.springframework.beans.factory.annotation.Autowired;
import javax.mail.MessagingException;
import java.io.*;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
* Item exporter to create simple AIPs for DSpace content. Currently exports
* individual items, or entire collections. For instructions on use, see
@@ -129,7 +113,9 @@ public class ItemExportServiceImpl implements ItemExportService
}
System.out.println("Exporting item to " + mySequenceNumber);
exportItem(c, i.next(), fullPath, mySequenceNumber, migrate, excludeBitstreams);
Item item = i.next();
exportItem(c, item, fullPath, mySequenceNumber, migrate, excludeBitstreams);
c.uncacheEntity(item);
mySequenceNumber++;
}
}

View File

@@ -189,7 +189,7 @@ public class ItemImportCLITool {
String zipfilename = "";
if (line.hasOption('z')) {
zip = true;
zipfilename = sourcedir + System.getProperty("file.separator") + line.getOptionValue('z');
zipfilename = line.getOptionValue('z');
}
//By default assume collections will be given on the command line
@@ -294,7 +294,7 @@ public class ItemImportCLITool {
myloader.setQuiet(isQuiet);
// create a context
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);
// find the EPerson, assign to context
EPerson myEPerson = null;

View File

@@ -14,21 +14,6 @@ import gr.ekt.bte.core.TransformationSpec;
import gr.ekt.bte.dataloader.FileDataLoader;
import gr.ekt.bteio.generators.DSpaceOutputGenerator;
import gr.ekt.bteio.loaders.OAIPMHDataLoader;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipFile;
import java.util.zip.ZipEntry;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import org.apache.commons.collections.ComparatorUtils;
import org.apache.commons.io.FileDeleteStrategy;
import org.apache.commons.io.FileUtils;
@@ -46,18 +31,14 @@ import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.*;
import org.dspace.content.Collection;
import org.dspace.content.service.*;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.LogManager;
import org.dspace.core.*;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.EPersonService;
import org.dspace.eperson.service.GroupService;
import org.dspace.handle.service.HandleService;
import org.dspace.utils.DSpace;
import org.dspace.workflow.WorkflowItem;
import org.dspace.workflow.WorkflowService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
@@ -67,6 +48,19 @@ import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.mail.MessagingException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.TransformerException;
import java.io.*;
import java.net.URL;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipFile;
/**
* Import items into DSpace. The conventional use is upload files by copying
@@ -341,7 +335,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
clist = mycollections;
}
addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
Item item = addItem(c, clist, sourceDir, dircontents[i], mapOut, template);
c.uncacheEntity(item);
System.out.println(i + " " + dircontents[i]);
}
}
@@ -414,7 +409,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
handleOut.close();
deleteItem(c, oldItem);
addItem(c, mycollections, sourceDir, newItemName, null, template);
Item newItem = addItem(c, mycollections, sourceDir, newItemName, null, template);
c.uncacheEntity(oldItem);
c.uncacheEntity(newItem);
}
}
@@ -445,6 +442,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
Item myitem = itemService.findByIdOrLegacyId(c, itemID);
System.out.println("Deleting item " + itemID);
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
}
@@ -470,6 +468,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
// create workspace item
Item myitem = null;
WorkspaceItem wi = null;
WorkflowItem wfi = null;
if (!isTest)
{
@@ -495,9 +494,9 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
// Should we send a workflow alert email or not?
if (useWorkflowSendEmail) {
workflowService.start(c, wi);
wfi = workflowService.start(c, wi);
} else {
workflowService.startWithoutNotify(c, wi);
wfi = workflowService.startWithoutNotify(c, wi);
}
// send ID to the mapfile
@@ -553,6 +552,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
mapOut.println(mapOutputString);
}
//Clear intermediary objects from the cache
c.uncacheEntity(wi);
c.uncacheEntity(wfi);
return myitem;
}
@@ -590,6 +593,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
else
{
deleteItem(c, myitem);
c.uncacheEntity(myitem);
}
}
@@ -712,6 +716,10 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
value = "";
}
else
{
value = value.trim();
}
// //getElementData(n, "element");
String element = getAttributeValue(n, "element");
String qualifier = getAttributeValue(n, "qualifier"); //NodeValue();
@@ -733,8 +741,8 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
{
qualifier = null;
}
if (!isTest)
// only add metadata if it is no test and there is a real value
if (!isTest && !value.equals(""))
{
itemService.addMetadata(c, i, schema, element, qualifier, language, value);
}

View File

@@ -7,21 +7,7 @@
*/
package org.dspace.app.itemupdate;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FilenameFilter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
@@ -31,6 +17,9 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import java.io.*;
import java.util.*;
/**
*
* Provides some batch editing capabilities for items in DSpace:
@@ -353,7 +342,7 @@ public class ItemUpdate {
pr("ItemUpdate - initializing run on " + (new Date()).toString());
context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
iu.setEPerson(context, iu.eperson);
context.turnOffAuthorisationSystem();
@@ -460,6 +449,7 @@ public class ItemUpdate {
{
Item item = itarch.getItem();
itemService.update(context, item); //need to update before commit
context.uncacheEntity(item);
}
ItemUpdate.pr("Item " + dirname + " completed");
successItemCount++;

View File

@@ -44,18 +44,20 @@ public interface FormatFilter
public String getFormatString();
/**
* @return string to describe the newly-generated Bitstream's - how it was
* @return string to describe the newly-generated Bitstream - how it was
* produced is a good idea
*/
public String getDescription();
/**
* Read the source stream and produce the filtered content.
*
* @param item Item
* @param source
* input stream
* @param verbose verbosity flag
*
* @return result of filter's transformation, written out to a bitstream
* @return result of filter's transformation as a byte stream.
* @throws Exception if error
*/
public InputStream getDestinationStream(Item item, InputStream source, boolean verbose)

View File

@@ -46,11 +46,4 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter {
}
}
public static final String[] PDF = {"Adobe PDF"};
@Override
public String[] getInputMIMETypes()
{
return PDF;
}
}

View File

@@ -23,6 +23,7 @@ import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Context;
import org.im4java.core.ConvertCmd;
import org.im4java.core.Info;
import org.im4java.core.IM4JavaException;
import org.im4java.core.IMOperation;
import org.im4java.process.ProcessStarter;
@@ -34,175 +35,170 @@ import org.dspace.core.ConfigurationManager;
* thumbnail.maxwidth, thumbnail.maxheight, the size we want our thumbnail to be
* no bigger than. Creates only JPEGs.
*/
public abstract class ImageMagickThumbnailFilter extends MediaFilter implements SelfRegisterInputFormats
{
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
public abstract class ImageMagickThumbnailFilter extends MediaFilter {
protected static int width = 180;
protected static int height = 120;
private static boolean flatten = true;
static String bitstreamDescription = "IM Thumbnail";
static final String defaultPattern = "Generated Thumbnail";
static Pattern replaceRegex = Pattern.compile(defaultPattern);
protected final ItemService itemService = ContentServiceFactory.getInstance().getItemService();
static String cmyk_profile;
static String srgb_profile;
static {
String pre = ImageMagickThumbnailFilter.class.getName();
String s = ConfigurationManager.getProperty(pre + ".ProcessStarter");
ProcessStarter.setGlobalSearchPath(s);
width = ConfigurationManager.getIntProperty("thumbnail.maxwidth", width);
height = ConfigurationManager.getIntProperty("thumbnail.maxheight", height);
flatten = ConfigurationManager.getBooleanProperty(pre + ".flatten", flatten);
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch(PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: "+e.getMessage());
}
}
public ImageMagickThumbnailFilter() {
}
@Override
public String getFilteredName(String oldFilename)
{
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName()
{
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString()
{
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription()
{
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: "+op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose) throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath()+s);
if (flatten)
{
op.flatten();
String description = ConfigurationManager.getProperty(pre + ".bitstreamDescription");
cmyk_profile = ConfigurationManager.getProperty(pre + ".cmyk_profile");
srgb_profile = ConfigurationManager.getProperty(pre + ".srgb_profile");
if (description != null) {
bitstreamDescription = description;
}
try {
String patt = ConfigurationManager.getProperty(pre + ".replaceRegex");
replaceRegex = Pattern.compile(patt == null ? defaultPattern : patt);
} catch (PatternSyntaxException e) {
System.err.println("Invalid thumbnail replacement pattern: " + e.getMessage());
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: "+op);
}
cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose)
throws Exception
{
String nsrc = source.getName();
for(Bundle b: itemService.getBundles(item, "THUMBNAIL")) {
for(Bitstream bit: b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc)) continue;
}
}
String description = bit.getDescription();
//If anything other than a generated thumbnail is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc + " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc + " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item " + item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; //assume that the thumbnail is a custom one
}
@Override
public String[] getInputMIMETypes()
{
return ImageIO.getReaderMIMETypes();
}
public ImageMagickThumbnailFilter() {
}
@Override
public String[] getInputDescriptions()
{
return null;
}
@Override
public String getFilteredName(String oldFilename) {
return oldFilename + ".jpg";
}
/**
* @return String bundle name
*
*/
@Override
public String getBundleName() {
return "THUMBNAIL";
}
/**
* @return String bitstreamformat
*/
@Override
public String getFormatString() {
return "JPEG";
}
/**
* @return String bitstreamDescription
*/
@Override
public String getDescription() {
return bitstreamDescription;
}
public File inputStreamToTempFile(InputStream source, String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix);
f.deleteOnExit();
FileOutputStream fos = new FileOutputStream(f);
byte[] buffer = new byte[1024];
int len = source.read(buffer);
while (len != -1) {
fos.write(buffer, 0, len);
len = source.read(buffer);
}
fos.close();
return f;
}
public File getThumbnailFile(File f, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
op.addImage(f.getAbsolutePath());
op.thumbnail(width, height);
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Thumbnail Param: " + op);
}
cmd.run(op);
return f2;
}
public File getImageFile(File f, int page, boolean verbose)
throws IOException, InterruptedException, IM4JavaException {
File f2 = new File(f.getParentFile(), f.getName() + ".jpg");
f2.deleteOnExit();
ConvertCmd cmd = new ConvertCmd();
IMOperation op = new IMOperation();
String s = "[" + page + "]";
op.addImage(f.getAbsolutePath() + s);
if (flatten) {
op.flatten();
}
// PDFs using the CMYK color system can be handled specially if
// profiles are defined
if (cmyk_profile != null && srgb_profile != null) {
Info imageInfo = new Info(f.getAbsolutePath(), true);
String imageClass = imageInfo.getImageClass();
if (imageClass.contains("CMYK")) {
op.profile(cmyk_profile);
op.profile(srgb_profile);
}
}
op.addImage(f2.getAbsolutePath());
if (verbose) {
System.out.println("IM Image Param: " + op);
}
cmd.run(op);
return f2;
}
@Override
public boolean preProcessBitstream(Context c, Item item, Bitstream source, boolean verbose) throws Exception {
String nsrc = source.getName();
for (Bundle b : itemService.getBundles(item, "THUMBNAIL")) {
for (Bitstream bit : b.getBitstreams()) {
String n = bit.getName();
if (n != null) {
if (nsrc != null) {
if (!n.startsWith(nsrc))
continue;
}
}
String description = bit.getDescription();
// If anything other than a generated thumbnail
// is found, halt processing
if (description != null) {
if (replaceRegex.matcher(description).matches()) {
if (verbose) {
System.out.println(description + " " + nsrc
+ " matches pattern and is replacable.");
}
continue;
}
if (description.equals(bitstreamDescription)) {
if (verbose) {
System.out.println(bitstreamDescription + " " + nsrc
+ " is replacable.");
}
continue;
}
}
System.out.println("Custom Thumbnail exists for " + nsrc + " for item "
+ item.getHandle() + ". Thumbnail will not be generated. ");
return false;
}
}
return true; // assume that the thumbnail is a custom one
}
@Override
public String[] getInputExtensions()
{
return ImageIO.getReaderFileSuffixes();
}
}

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.app.mediafilter;
import java.io.InputStream;
import java.util.*;
import org.dspace.app.mediafilter.service.MediaFilterService;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.content.*;
@@ -24,6 +21,9 @@ import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.InputStream;
import java.util.*;
/**
* MediaFilterManager is the class that invokes the media/format filters over the
* repository's content. A few command line flags affect the operation of the
@@ -161,6 +161,7 @@ public class MediaFilterServiceImpl implements MediaFilterService, InitializingB
++processed;
}
// clear item objects from context cache and internal cache
c.uncacheEntity(currentItem);
currentItem = null;
}
}

View File

@@ -0,0 +1,81 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.mediafilter;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import org.apache.poi.POITextExtractor;
import org.apache.poi.extractor.ExtractorFactory;
import org.apache.poi.openxml4j.exceptions.OpenXML4JException;
import org.apache.xmlbeans.XmlException;
import org.dspace.content.Item;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Extract flat text from Microsoft Word documents (.doc, .docx).
*/
public class PoiWordFilter
        extends MediaFilter
{
    private static final Logger LOG = LoggerFactory.getLogger(PoiWordFilter.class);

    /**
     * @param oldFilename name of the source bitstream.
     * @return the source name with ".txt" appended.
     */
    @Override
    public String getFilteredName(String oldFilename)
    {
        return oldFilename + ".txt";
    }

    /**
     * @return name of the bundle that receives the extracted text.
     */
    @Override
    public String getBundleName()
    {
        return "TEXT";
    }

    /**
     * @return short description of the generated bitstream's format.
     */
    @Override
    public String getFormatString()
    {
        return "Text";
    }

    /**
     * @return description attached to the generated bitstream.
     */
    @Override
    public String getDescription()
    {
        return "Extracted text";
    }

    /**
     * Run the source document through POI's generic text extractor.
     *
     * @param currentItem the item owning the bitstream (not used here).
     * @param source the Word document content to be filtered.
     * @param verbose if true, also dump the extracted text to STDOUT.
     * @return the extracted text as a stream of UTF-8 encoded bytes.
     * @throws Exception if the document cannot be parsed.
     */
    @Override
    public InputStream getDestinationStream(Item currentItem, InputStream source, boolean verbose)
            throws Exception
    {
        String text;
        try
        {
            // get input stream from bitstream, pass to filter, get string back
            POITextExtractor extractor = ExtractorFactory.createExtractor(source);
            text = extractor.getText();
        }
        catch (IOException | OpenXML4JException | XmlException e)
        {
            System.err.format("Invalid File Format: %s%n", e.getMessage());
            LOG.error("Unable to parse the bitstream: ", e);
            throw e;
        }

        // if verbose flag is set, print out extracted text to STDOUT
        if (verbose)
        {
            System.out.println(text);
        }

        // Encode explicitly as UTF-8: the no-argument String.getBytes() uses
        // the platform default charset, which silently corrupts non-ASCII
        // text on systems where that default is not UTF-8.
        return new ByteArrayInputStream(text.getBytes(StandardCharsets.UTF_8));
    }
}

View File

@@ -7,26 +7,14 @@
*/
package org.dspace.app.packager;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.DSpaceObject;
import org.dspace.content.crosswalk.CrosswalkException;
import org.dspace.content.packager.PackageDisseminator;
import org.dspace.content.packager.PackageException;
import org.dspace.content.packager.PackageParameters;
import org.dspace.content.packager.PackageIngester;
import org.dspace.content.packager.PackageParameters;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -36,6 +24,10 @@ import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.workflow.WorkflowException;
import java.io.*;
import java.sql.SQLException;
import java.util.List;
/**
* Command-line interface to the Packager plugin.
* <p>
@@ -331,6 +323,7 @@ public class Packager
//If we are in REPLACE mode
if(pkgParams.replaceModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -394,6 +387,8 @@ public class Packager
//else if normal SUBMIT mode (or basic RESTORE mode -- which is a special type of submission)
else if (myPackager.submit || pkgParams.restoreModeEnabled())
{
context.setMode(Context.Mode.BATCH_EDIT);
PackageIngester sip = (PackageIngester) pluginService
.getNamedPlugin(PackageIngester.class, myPackager.packageType);
if (sip == null)
@@ -445,6 +440,8 @@ public class Packager
}// else, if DISSEMINATE mode
else
{
context.setMode(Context.Mode.READ_ONLY);
//retrieve specified package disseminator
PackageDisseminator dip = (PackageDisseminator) pluginService
.getNamedPlugin(PackageDisseminator.class, myPackager.packageType);

View File

@@ -7,28 +7,9 @@
*/
package org.dspace.app.sitemap;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.cli.*;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.content.Collection;
import org.dspace.content.Community;
@@ -42,6 +23,16 @@ import org.dspace.core.LogManager;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLEncoder;
import java.sql.SQLException;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
/**
* Command-line utility for generating HTML and Sitemaps.org protocol Sitemaps.
*
@@ -188,7 +179,7 @@ public class GenerateSitemaps
+ "?map=", null);
}
Context c = new Context();
Context c = new Context(Context.Mode.READ_ONLY);
List<Community> comms = communityService.findAll(c);
@@ -201,6 +192,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(comm);
}
List<Collection> colls = collectionService.findAll(c);
@@ -214,6 +207,8 @@ public class GenerateSitemaps
if (makeSitemapOrg) {
sitemapsOrg.addURL(url, null);
}
c.uncacheEntity(coll);
}
Iterator<Item> allItems = itemService.findAll(c);
@@ -234,6 +229,8 @@ public class GenerateSitemaps
sitemapsOrg.addURL(url, lastMod);
}
c.uncacheEntity(i);
itemCount++;
}

View File

@@ -0,0 +1,112 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.app.util;
import org.apache.log4j.Logger;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import java.sql.SQLException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
/**
 * Orders the bitstreams of an item so that the most suitable candidate for
 * Google Scholar's citation_pdf_url link appears first.
*/
public class GoogleBitstreamComparator implements Comparator<Bitstream>{

    private final static Logger log = Logger.getLogger(GoogleBitstreamComparator.class);

    // MIME type -> priority (1 = most preferred), built once from the
    // configured citation.prioritized_types list.
    HashMap<String, Integer> priorityMap = new HashMap<>();

    private Context context;

    /**
     * @param context current DSpace context, used to resolve bitstream formats.
     * @param googleScholarSettings settings from google-metadata.properties;
     *        "citation.prioritized_types" lists bitstream format short
     *        descriptions in descending order of preference.
     */
    public GoogleBitstreamComparator(Context context, Map<String, String> googleScholarSettings) {
        this.context = context;
        String[] shortDescriptions;
        if (googleScholarSettings.containsKey("citation.prioritized_types")){
            shortDescriptions = splitAndTrim(googleScholarSettings.get("citation.prioritized_types"));
        } else {
            log.warn("Please define citation.prioritized_types in google-metadata.properties");
            shortDescriptions = new String[0];
        }
        int priority = 1;
        for(String s: shortDescriptions){
            try {
                BitstreamFormat format = ContentServiceFactory.getInstance().getBitstreamFormatService().findByShortDescription(context, s);
                if (format != null) {
                    priorityMap.put(format.getMIMEType(), priority);
                } else {
                    log.warn(s + " is not a valid short description, please add it to bitstream-formats.xml");
                }
                priority++;
            } catch (SQLException e){
                log.error(e.getMessage());
            }
        }
    }

    /**
     * Split a comma-separated configuration value and trim each entry.
     *
     * @param toSplit raw configuration value; may be null.
     * @return trimmed entries, or an empty array when toSplit is null.
     */
    private String[] splitAndTrim(String toSplit){
        if(toSplit != null) {
            String[] splittedArray = toSplit.split(",");
            for (int i = 0; i < splittedArray.length; i++)
                splittedArray[i] = splittedArray[i].trim();
            return splittedArray;
        }
        else {
            return new String[0];
        }
    }

    /**
     * Compares two bitstreams based on their mimetypes; if the mimetypes have
     * equal priority, the largest bitstream comes first.
     * See google-metadata.properties to define the order.
     * @param b1 first bitstream
     * @param b2 second bitstream
     * @return negative if b1 sorts first, positive if b2 sorts first, zero
     *         when they rank equally.
     */
    @Override
    public int compare(Bitstream b1, Bitstream b2) {
        int priority1 = getPriorityFromBitstream(b1);
        int priority2 = getPriorityFromBitstream(b2);
        if (priority1 != priority2) {
            // Lower priority number means more preferred, so it sorts first.
            return Integer.compare(priority1, priority2);
        }
        // Same priority: larger bitstream first. Long.compare returns 0 for
        // equal sizes; the previous code returned 1 for BOTH orderings of
        // equal-sized bitstreams, violating the Comparator contract
        // (compare(a,b) must equal -compare(b,a)), which can make sorts fail
        // with "Comparison method violates its general contract!".
        return Long.compare(b2.getSize(), b1.getSize());
    }

    /**
     * Look up the configured priority of a bitstream's MIME type.
     *
     * @param bitstream the bitstream whose format is inspected.
     * @return the configured priority, or Integer.MAX_VALUE when the MIME
     *         type is not configured or the format cannot be resolved.
     */
    private int getPriorityFromBitstream(Bitstream bitstream) {
        try {
            // Resolve the format once (the old code fetched it three times)
            // and do a single map lookup.
            Integer priority = priorityMap.get(bitstream.getFormat(context).getMIMEType());
            return priority != null ? priority : Integer.MAX_VALUE;
        } catch (SQLException e) {
            log.error(e.getMessage());
            return Integer.MAX_VALUE;
        }
    }
}

View File

@@ -7,38 +7,30 @@
*/
package org.dspace.app.util;
import java.sql.SQLException;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import org.apache.log4j.Logger;
import org.dspace.authorize.factory.AuthorizeServiceFactory;
import org.dspace.content.*;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map.Entry;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.handle.factory.HandleServiceFactory;
import org.jdom.Element;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.sql.SQLException;
import java.util.*;
import java.util.Collection;
import java.util.Map.Entry;
/**
* Configuration and mapping for Google Scholar output metadata
* @author Sands Fish
@@ -60,7 +52,7 @@ public class GoogleMetadata
protected String itemURL;
// Configuration keys and fields
protected static Map<String, String> configuredFields = new HashMap<String, String>();
protected static Map<String, String> googleScholarSettings = new HashMap<String, String>();
// Google field names (e.g. citation_fieldname) and formatted metadata
// values
@@ -128,6 +120,8 @@ public class GoogleMetadata
protected final int ALL_FIELDS_IN_OPTION = 2;
private static GoogleBitstreamComparator googleBitstreamComparator = null;
// Load configured fields from google-metadata.properties
static
{
@@ -181,7 +175,7 @@ public class GoogleMetadata
if (null != name && !name.equals("") && null != field
&& !field.equals(""))
{
configuredFields.put(name.trim(), field.trim());
googleScholarSettings.put(name.trim(), field.trim());
}
}
}
@@ -200,9 +194,9 @@ public class GoogleMetadata
{
log.debug("Google Metadata Configuration Mapping:");
for (String name : configuredFields.keySet())
for (String name : googleScholarSettings.keySet())
{
log.debug(" " + name + " => " + configuredFields.get(name));
log.debug(" " + name + " => " + googleScholarSettings.get(name));
}
}
@@ -221,6 +215,7 @@ public class GoogleMetadata
this.item = item;
this.itemService = ContentServiceFactory.getInstance().getItemService();
itemURL = HandleServiceFactory.getInstance().getHandleService().resolveToURL(context, item.getHandle());
googleBitstreamComparator = new GoogleBitstreamComparator(context, googleScholarSettings);
parseItem();
}
@@ -234,7 +229,7 @@ public class GoogleMetadata
protected boolean addSingleField(String fieldName)
{
String config = configuredFields.get(fieldName);
String config = googleScholarSettings.get(fieldName);
if (null == config || config.equals(""))
{
@@ -738,7 +733,7 @@ public class GoogleMetadata
addSingleField(PATENT_NUMBER);
// Use config value for patent country. Should be a literal.
String countryConfig = configuredFields.get(PATENT_COUNTRY);
String countryConfig = googleScholarSettings.get(PATENT_COUNTRY);
if (null != countryConfig && !countryConfig.trim().equals(""))
{
metadataMappings.put(PATENT_COUNTRY, countryConfig.trim());
@@ -1051,10 +1046,13 @@ public class GoogleMetadata
*/
protected Bitstream findLinkableFulltext(Item item) throws SQLException {
Bitstream bestSoFar = null;
int bitstreamCount = 0;
List<Bundle> contentBundles = itemService.getBundles(item, "ORIGINAL");
for (Bundle bundle : contentBundles) {
List<Bitstream> bitstreams = bundle.getBitstreams();
Collections.sort(bitstreams, googleBitstreamComparator);
for (Bitstream candidate : bitstreams) {
if (candidate.equals(bundle.getPrimaryBitstream())) { // is primary -> use this one
if (isPublic(candidate)) {
@@ -1097,7 +1095,7 @@ public class GoogleMetadata
protected void addAggregateValues(String field, String delimiter)
{
String authorConfig = configuredFields.get(field);
String authorConfig = googleScholarSettings.get(field);
ArrayList<MetadataValue> fields = resolveMetadataFields(authorConfig);
if (null != fields && !fields.isEmpty())
@@ -1125,7 +1123,7 @@ public class GoogleMetadata
*/
protected void addMultipleValues(String FIELD)
{
String fieldConfig = configuredFields.get(FIELD);
String fieldConfig = googleScholarSettings.get(FIELD);
ArrayList<MetadataValue> fields = resolveMetadataFields(fieldConfig);
if (null != fields && !fields.isEmpty())
@@ -1146,7 +1144,7 @@ public class GoogleMetadata
protected boolean itemIsDissertation()
{
String dConfig = configuredFields.get(DISSERTATION_ID);
String dConfig = googleScholarSettings.get(DISSERTATION_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;
@@ -1165,7 +1163,7 @@ public class GoogleMetadata
protected boolean itemIsPatent()
{
String dConfig = configuredFields.get(PATENT_ID);
String dConfig = googleScholarSettings.get(PATENT_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;
@@ -1184,7 +1182,7 @@ public class GoogleMetadata
protected boolean itemIsTechReport()
{
String dConfig = configuredFields.get(TECH_REPORT_ID);
String dConfig = googleScholarSettings.get(TECH_REPORT_ID);
if (null == dConfig || dConfig.trim().equals(""))
{
return false;

View File

@@ -145,7 +145,6 @@ public class DSpaceAuthorityIndexer implements AuthorityIndexerInterface, Initia
} else {
// 3. iterate over the items
if (itemIterator.hasNext()) {
currentItem = itemIterator.next();
//Reset our current field index

View File

@@ -244,6 +244,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
return true;
}
// If authorization was given before and cached
Boolean cachedResult = c.getCachedAuthorizationResult(o, action, e);
if (cachedResult != null) {
return cachedResult.booleanValue();
}
// is eperson set? if not, userToCheck = null (anonymous)
EPerson userToCheck = null;
if (e != null)
@@ -254,8 +260,9 @@ public class AuthorizeServiceImpl implements AuthorizeService
// if user is an Admin on this object
DSpaceObject adminObject = useInheritance ? serviceFactory.getDSpaceObjectService(o).getAdminObject(c, o, action) : null;
if (isAdmin(c, adminObject))
if (isAdmin(c, e, adminObject))
{
c.cacheAuthorizedAction(o, action, e, true, null);
return true;
}
}
@@ -297,6 +304,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
if (ignoreCustomPolicies
&& ResourcePolicy.TYPE_CUSTOM.equals(rp.getRpType()))
{
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
continue;
}
@@ -305,20 +317,29 @@ public class AuthorizeServiceImpl implements AuthorizeService
{
if (rp.getEPerson() != null && rp.getEPerson().equals(userToCheck))
{
c.cacheAuthorizedAction(o, action, e, true, rp);
return true; // match
}
if ((rp.getGroup() != null)
&& (groupService.isMember(c, rp.getGroup())))
&& (groupService.isMember(c, e, rp.getGroup())))
{
// group was set, and eperson is a member
// of that group
c.cacheAuthorizedAction(o, action, e, true, rp);
return true;
}
}
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
}
// default authorization is denial
c.cacheAuthorizedAction(o, action, e, false, null);
return false;
}
@@ -349,9 +370,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
@Override
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException
{
return this.isAdmin(c, c.getCurrentUser(), o);
}
@Override
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException
{
// return true if user is an Administrator
if (isAdmin(c))
if (isAdmin(c, e))
{
return true;
}
@@ -361,6 +387,11 @@ public class AuthorizeServiceImpl implements AuthorizeService
return false;
}
Boolean cachedResult = c.getCachedAuthorizationResult(o, Constants.ADMIN, e);
if (cachedResult != null) {
return cachedResult.booleanValue();
}
//
// First, check all Resource Policies directly on this object
//
@@ -371,19 +402,27 @@ public class AuthorizeServiceImpl implements AuthorizeService
// check policies for date validity
if (resourcePolicyService.isDateValid(rp))
{
if (rp.getEPerson() != null && rp.getEPerson().equals(c.getCurrentUser()))
if (rp.getEPerson() != null && rp.getEPerson().equals(e))
{
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
return true; // match
}
if ((rp.getGroup() != null)
&& (groupService.isMember(c, rp.getGroup())))
&& (groupService.isMember(c, e, rp.getGroup())))
{
// group was set, and eperson is a member
// of that group
c.cacheAuthorizedAction(o, Constants.ADMIN, e, true, rp);
return true;
}
}
if(c.isReadOnly()) {
//When we are in read-only mode, we will cache authorized actions in a different way
//So we remove this resource policy from the cache.
c.uncacheEntity(rp);
}
}
// If user doesn't have specific Admin permissions on this object,
@@ -393,9 +432,12 @@ public class AuthorizeServiceImpl implements AuthorizeService
DSpaceObject parent = serviceFactory.getDSpaceObjectService(o).getParentObject(c, o);
if (parent != null)
{
return isAdmin(c, parent);
boolean admin = isAdmin(c, e, parent);
c.cacheAuthorizedAction(o, Constants.ADMIN, e, admin, null);
return admin;
}
c.cacheAuthorizedAction(o, Constants.ADMIN, e, false, null);
return false;
}
@@ -418,7 +460,23 @@ public class AuthorizeServiceImpl implements AuthorizeService
return groupService.isMember(c, Group.ADMIN);
}
}
@Override
public boolean isAdmin(Context c, EPerson e) throws SQLException
{
// if we're ignoring authorization, user is member of admin
if (c.ignoreAuthorization())
{
return true;
}
if (e == null)
{
return false; // anonymous users can't be admins....
} else
{
return groupService.isMember(c, e, Group.ADMIN);
}
}
public boolean isCommunityAdmin(Context c) throws SQLException
{
EPerson e = c.getCurrentUser();
@@ -624,7 +682,7 @@ public class AuthorizeServiceImpl implements AuthorizeService
List<Group> groups = new ArrayList<Group>();
for (ResourcePolicy resourcePolicy : policies) {
if(resourcePolicy.getGroup() != null)
if(resourcePolicy.getGroup() != null && resourcePolicyService.isDateValid(resourcePolicy))
{
groups.add(resourcePolicy.getGroup());
}
@@ -642,13 +700,14 @@ public class AuthorizeServiceImpl implements AuthorizeService
@Override
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
{
return findByTypeIdGroupAction(c, dso, group, action, policyID) != null;
return !resourcePolicyService.findByTypeGroupActionExceptId(c, dso, group, action, policyID).isEmpty();
}
@Override
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action)
throws SQLException
{
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action, policyID);
List<ResourcePolicy> policies = resourcePolicyService.find(c, dso, group, action);
if (CollectionUtils.isNotEmpty(policies))
{
@@ -658,7 +717,6 @@ public class AuthorizeServiceImpl implements AuthorizeService
}
}
/**
* Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically at the groups that
* have right on the collection. E.g., if the anonymous can access the collection policies are assigned to anonymous.
@@ -734,12 +792,19 @@ public class AuthorizeServiceImpl implements AuthorizeService
public ResourcePolicy createOrModifyPolicy(ResourcePolicy policy, Context context, String name, Group group, EPerson ePerson,
Date embargoDate, int action, String reason, DSpaceObject dso) throws AuthorizeException, SQLException
{
ResourcePolicy policyTemp = null;
if (policy != null)
{
List<ResourcePolicy> duplicates = resourcePolicyService.findByTypeGroupActionExceptId(context, dso, group, action, policy.getID());
if (!duplicates.isEmpty())
{
policy = duplicates.get(0);
}
} else {
// if an identical policy (same Action and same Group) is already in place modify it...
policyTemp = findByTypeGroupAction(context, dso, group, action);
}
int policyID = -1;
if (policy != null) policyID = policy.getID();
// if an identical policy (same Action and same Group) is already in place modify it...
ResourcePolicy policyTemp = findByTypeIdGroupAction(context, dso, group, action, policyID);
if (policyTemp != null)
{
policy = policyTemp;

View File

@@ -218,9 +218,9 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
}
/**
* gets ID for Group referred to by this policy
* gets the Group referred to by this policy
*
* @return groupID, or null if no group set
* @return group, or null if no group set
*/
public Group getGroup()
{
@@ -228,7 +228,7 @@ public class ResourcePolicy implements ReloadableEntity<Integer> {
}
/**
* sets ID for Group referred to by this policy
* sets the Group referred to by this policy
* @param epersonGroup Group
*/
public void setGroup(Group epersonGroup)

View File

@@ -104,13 +104,22 @@ public class ResourcePolicyServiceImpl implements ResourcePolicyService
}
@Override
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
return resourcePolicyDAO.findByTypeIdGroupAction(c, dso, group, action, notPolicyID);
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException {
return resourcePolicyDAO.findByTypeGroupAction(c, dso, group, action);
}
@Override
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException{
return resourcePolicyDAO.findByEPersonGroupTypeIdAction(c, e, groups, action, type_id);
}
@Override
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
throws SQLException
{
return resourcePolicyDAO.findByTypeGroupActionExceptId(context, dso, group, action, notPolicyID);
}
/**
* Delete an ResourcePolicy

View File

@@ -34,7 +34,16 @@ public interface ResourcePolicyDAO extends GenericDAO<ResourcePolicy> {
public List<ResourcePolicy> findByDSoAndAction(Context context, DSpaceObject dso, int actionId) throws SQLException;
public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException;
/**
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
* This method can be used to detect duplicate ResourcePolicies.
* @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and action but other policyID.
* @throws SQLException
*/
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;

View File

@@ -75,7 +75,7 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
}
@Override
public List<ResourcePolicy> findByTypeIdGroupAction(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
public List<ResourcePolicy> findByTypeGroupAction(Context context, DSpaceObject dso, Group group, int action) throws SQLException {
Criteria criteria = createCriteria(context, ResourcePolicy.class);
criteria.add(Restrictions.and(
Restrictions.eq("dSpaceObject", dso),
@@ -83,15 +83,21 @@ public class ResourcePolicyDAOImpl extends AbstractHibernateDAO<ResourcePolicy>
Restrictions.eq("actionId", action)
));
criteria.setMaxResults(1);
List<ResourcePolicy> results;
if (notPolicyID != -1)
{
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
}
return list(criteria);
}
@Override
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException {
Criteria criteria = createCriteria(context, ResourcePolicy.class);
criteria.add(Restrictions.and(
Restrictions.eq("dSpaceObject", dso),
Restrictions.eq("epersonGroup", group),
Restrictions.eq("actionId", action)
));
criteria.add(Restrictions.and(Restrictions.not(Restrictions.eq("id", notPolicyID))));
return list(criteria);
}
public List<ResourcePolicy> findByEPersonGroupTypeIdAction(Context context, EPerson e, List<Group> groups, int action, int type_id) throws SQLException
{
Criteria criteria = createCriteria(context, ResourcePolicy.class);

View File

@@ -167,11 +167,28 @@ public interface AuthorizeService {
*/
public boolean isAdmin(Context c, DSpaceObject o) throws SQLException;
/**
* Check to see if a specific user is an Administrator of a given object
* within DSpace. Always return {@code true} if the user is a System
* Admin
*
* @param c current context
* @param e the user to check
* @param o current DSpace Object, if <code>null</code> the call will be
* equivalent to a call to the <code>isAdmin(Context c)</code>
* method
* @return {@code true} if the user has administrative privileges on the
* given DSpace object
* @throws SQLException if database error
*/
public boolean isAdmin(Context c, EPerson e, DSpaceObject o) throws SQLException;
/**
* Check to see if the current user is a System Admin. Always return
* {@code true} if c.ignoreAuthorization is set. Anonymous users
* can't be Admins (EPerson set to NULL)
* {@code true} if c.ignoreAuthorization is set. If no EPerson is
* logged in and context.getCurrentUser() returns null, this method
* returns false as anonymous users can never be administrators.
*
* @param c current context
* @return {@code true} if user is an admin or ignore authorization
@@ -179,6 +196,17 @@ public interface AuthorizeService {
* @throws SQLException if database error
*/
public boolean isAdmin(Context c) throws SQLException;
/**
* Check to see if a specific user is system admin. Always return
* {@code true} if c.ignoreAuthorization is set.
*
* @param c current context
* @return {@code true} if user is an admin or ignore authorization
* flag set
* @throws SQLException if database error
*/
public boolean isAdmin(Context c, EPerson e) throws SQLException;
public boolean isCommunityAdmin(Context c) throws SQLException;
@@ -410,8 +438,8 @@ public interface AuthorizeService {
* @throws SQLException if there's a database problem
*/
public boolean isAnIdenticalPolicyAlreadyInPlace(Context c, DSpaceObject o, Group group, int actionID, int policyID) throws SQLException;
public ResourcePolicy findByTypeIdGroupAction(Context c, DSpaceObject dso, Group group, int action, int policyID) throws SQLException;
public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException;
/**

View File

@@ -33,11 +33,21 @@ public interface ResourcePolicyService extends DSpaceCRUDService<ResourcePolicy>
public List<ResourcePolicy> find(Context c, DSpaceObject o, int actionId) throws SQLException;
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action, int notPolicyID) throws SQLException;
public List<ResourcePolicy> find(Context c, DSpaceObject dso, Group group, int action) throws SQLException;
public List<ResourcePolicy> find(Context context, Group group) throws SQLException;
public List<ResourcePolicy> find(Context c, EPerson e, List<Group> groups, int action, int type_id) throws SQLException;
/**
* Look for ResourcePolicies by DSpaceObject, Group, and action, ignoring IDs with a specific PolicyID.
* This method can be used to detect duplicate ResourcePolicies.
* @param notPolicyID ResourcePolicies with this ID will be ignored while looking out for equal ResourcePolicies.
* @return List of resource policies for the same DSpaceObject, group and action but other policyID.
* @throws SQLException
*/
public List<ResourcePolicy> findByTypeGroupActionExceptId(Context context, DSpaceObject dso, Group group, int action, int notPolicyID)
throws SQLException;
public String getActionText(ResourcePolicy resourcePolicy);

View File

@@ -7,11 +7,6 @@
*/
package org.dspace.checker;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
import org.apache.commons.collections.MapUtils;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
@@ -23,6 +18,11 @@ import org.dspace.core.Context;
import org.dspace.storage.bitstore.factory.StorageServiceFactory;
import org.dspace.storage.bitstore.service.BitstreamStorageService;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.Map;
/**
* <p>
* Main class for the checksum checker tool, which calculates checksums for each
@@ -127,6 +127,7 @@ public final class CheckerCommand
collector.collect(context, info);
}
context.uncacheEntity(bitstream);
bitstream = dispatcher.next();
}
}

View File

@@ -55,7 +55,7 @@ public class ChecksumHistory implements ReloadableEntity<Long>
private String checksumCalculated;
@ManyToOne
@JoinColumn(name = "result")
@JoinColumn(name = "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;

View File

@@ -7,20 +7,7 @@
*/
package org.dspace.checker;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.mail.MessagingException;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.checker.factory.CheckerServiceFactory;
import org.dspace.checker.service.SimpleReporterService;
@@ -28,6 +15,14 @@ import org.dspace.core.ConfigurationManager;
import org.dspace.core.Context;
import org.dspace.core.Email;
import javax.mail.MessagingException;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Date;
import java.util.GregorianCalendar;
/**
* <p>
* The email reporter creates and sends emails to an administrator. This only
@@ -184,7 +179,7 @@ public class DailyReportEmailer
try
{
context = new Context();
context = new Context(Context.Mode.READ_ONLY);
// the number of bitstreams in report
int numBitstreams = 0;

View File

@@ -7,6 +7,8 @@
*/
package org.dspace.checker;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.dspace.content.Bitstream;
import javax.persistence.*;
@@ -57,7 +59,7 @@ public class MostRecentChecksum implements Serializable
private boolean bitstreamFound;
@OneToOne
@JoinColumn(name= "result")
@JoinColumn(name= "result", referencedColumnName = "result_code")
private ChecksumResult checksumResult;
/**
@@ -155,4 +157,44 @@ public class MostRecentChecksum implements Serializable
public void setBitstreamFound(boolean bitstreamFound) {
this.bitstreamFound = bitstreamFound;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
MostRecentChecksum that = (MostRecentChecksum) o;
return new EqualsBuilder()
.append(toBeProcessed, that.toBeProcessed)
.append(matchedPrevChecksum, that.matchedPrevChecksum)
.append(infoFound, that.infoFound)
.append(bitstreamFound, that.bitstreamFound)
.append(bitstream, that.bitstream)
.append(expectedChecksum, that.expectedChecksum)
.append(currentChecksum, that.currentChecksum)
.append(processStartDate, that.processStartDate)
.append(processEndDate, that.processEndDate)
.append(checksumAlgorithm, that.checksumAlgorithm)
.append(checksumResult, that.checksumResult)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(bitstream)
.append(toBeProcessed)
.append(expectedChecksum)
.append(currentChecksum)
.append(processStartDate)
.append(processEndDate)
.append(checksumAlgorithm)
.append(matchedPrevChecksum)
.append(infoFound)
.append(bitstreamFound)
.append(checksumResult)
.toHashCode();
}
}

View File

@@ -145,17 +145,14 @@ public final class ResultsPruner
throw new IllegalStateException("Problem parsing duration: "
+ e.getMessage(), e);
}
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if(code == null)
{
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}
if ("default".equals(resultCode))
{
if ("default".equals(resultCode)) {
rp.setDefaultDuration(duration);
}
else
{
} else {
ChecksumResultCode code = ChecksumResultCode.valueOf(resultCode);
if (code == null) {
throw new IllegalStateException("Checksum result code not found: " + resultCode);
}
rp.addInterested(code, duration);
}
}

View File

@@ -166,7 +166,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
*
* @return the MIME type
*/
public final String getMIMEType()
public String getMIMEType()
{
return mimetype;
}
@@ -177,7 +177,7 @@ public class BitstreamFormat implements Serializable, ReloadableEntity<Integer>
* @param s
* the new MIME type
*/
public final void setMIMEType(String s)
public void setMIMEType(String s)
{
this.mimetype = s;
}

View File

@@ -248,21 +248,21 @@ public class BitstreamServiceImpl extends DSpaceObjectServiceImpl<Bitstream> imp
context.addEvent(new Event(Event.DELETE, Constants.BITSTREAM, bitstream.getID(),
String.valueOf(bitstream.getSequenceID()), getIdentifiers(context, bitstream)));
//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.getBitstreams().remove(bitstream);
}
// Remove policies
authorizeService.removeAllPolicies(context, bitstream);
// Remove bitstream itself
bitstream.setDeleted(true);
update(context, bitstream);
//Remove our bitstream from all our bundles
final List<Bundle> bundles = bitstream.getBundles();
for (Bundle bundle : bundles) {
bundle.removeBitstream(bitstream);
}
//Remove all bundles from the bitstream object, clearing the connection in 2 ways
bundles.clear();
// Remove policies only after the bitstream has been updated (otherwise the current user has not WRITE rights)
authorizeService.removeAllPolicies(context, bitstream);
}
@Override

View File

@@ -10,6 +10,7 @@ package org.dspace.content;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.collections.CollectionUtils;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BundleService;
import org.dspace.core.Constants;
@@ -130,18 +131,40 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}
/**
* Get the bitstreams in this bundle
* Get a copy of the bitstream list of this bundle
* Note that this is a copy and if you wish to manipulate the bistream list, you should use
* {@ref Bundle.addBitstream}, {@ref Bundle.removeBitstream} or {@ref Bundle.clearBitstreams}
*
* @return the bitstreams
*/
public List<Bitstream> getBitstreams() {
return bitstreams;
List<Bitstream> bitstreamList = new LinkedList<>(this.bitstreams);
return bitstreamList;
}
/**
* Add a new bitstream to this bundle.
* @param bitstream
*/
void addBitstream(Bitstream bitstream){
bitstreams.add(bitstream);
}
/**
* Clear the list of bitstream of this bundle
*/
public void clearBitstreams() {
bitstreams.clear();
}
/**
* Remove the given bitstream from this bundles bitstream list
* @param bitstream The bitstream to remove
*/
public void removeBitstream(Bitstream bitstream) {
bitstreams.remove(bitstream);
}
/**
* Get the items this bundle appears in
*
@@ -215,5 +238,4 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport
}
return bundleService;
}
}

View File

@@ -198,7 +198,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
// We don't need to remove the link between bundle & bitstream, this will be handled in the delete() method.
bitstreamService.delete(context, bitstream);
}else{
bundle.getBitstreams().remove(bitstream);
bundle.removeBitstream(bitstream);
bitstream.getBundles().remove(bundle);
}
}
@@ -269,29 +269,60 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
public void setOrder(Context context, Bundle bundle, UUID[] bitstreamIds) throws AuthorizeException, SQLException {
authorizeService.authorizeAction(context, bundle, Constants.WRITE);
bundle.getBitstreams().clear();
List<Bitstream> currentBitstreams = bundle.getBitstreams();
List<Bitstream> updatedBitstreams = new ArrayList<Bitstream>();
// Loop through and ensure these Bitstream IDs are all valid. Add them to list of updatedBitstreams.
for (int i = 0; i < bitstreamIds.length; i++) {
UUID bitstreamId = bitstreamIds[i];
Bitstream bitstream = bitstreamService.find(context, bitstreamId);
if(bitstream == null){
// If we have an invalid Bitstream ID, just ignore it, but log a warning
if(bitstream == null) {
//This should never occur but just in case
log.warn(LogManager.getHeader(context, "Invalid bitstream id while changing bitstream order", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
continue;
}
bitstream.getBundles().remove(bundle);
bundle.getBitstreams().add(bitstream);
bitstream.getBundles().add(bundle);
bitstreamService.update(context, bitstream);
// If we have a Bitstream not in the current list, log a warning & exit immediately
if(!currentBitstreams.contains(bitstream))
{
log.warn(LogManager.getHeader(context, "Encountered a bitstream not in this bundle while changing bitstream order. Bitstream order will not be changed.", "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId));
return;
}
updatedBitstreams.add(bitstream);
}
//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
// If our lists are different sizes, exit immediately
if(updatedBitstreams.size()!=currentBitstreams.size())
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
log.warn(LogManager.getHeader(context, "Size of old list and new list do not match. Bitstream order will not be changed.", "Bundle: " + bundle.getID()));
return;
}
// As long as the order has changed, update it
if(CollectionUtils.isNotEmpty(updatedBitstreams) && !updatedBitstreams.equals(currentBitstreams))
{
//First clear out the existing list of bitstreams
bundle.clearBitstreams();
// Now add them back in the proper order
for (Bitstream bitstream : updatedBitstreams)
{
bitstream.getBundles().remove(bundle);
bundle.addBitstream(bitstream);
bitstream.getBundles().add(bundle);
bitstreamService.update(context, bitstream);
}
//The order of the bitstreams has changed, ensure that we update the last modified of our item
Item owningItem = (Item) getParentObject(context, bundle);
if(owningItem != null)
{
itemService.updateLastModified(context, owningItem);
itemService.update(context, owningItem);
}
}
}
@@ -399,16 +430,15 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl<Bundle> implement
bundle.getName(), getIdentifiers(context, bundle)));
// Remove bitstreams
Iterator<Bitstream> bitstreams = bundle.getBitstreams().iterator();
while (bitstreams.hasNext()) {
Bitstream bitstream = bitstreams.next();
bitstreams.remove();
List<Bitstream> bitstreams = bundle.getBitstreams();
bundle.clearBitstreams();
for (Bitstream bitstream : bitstreams) {
removeBitstream(context, bundle, bitstream);
}
Iterator<Item> items = bundle.getItems().iterator();
while (items.hasNext()) {
Item item = items.next();
List<Item> items = new LinkedList<>(bundle.getItems());
bundle.getItems().clear();
for (Item item : items) {
item.removeBundle(bundle);
}

View File

@@ -7,16 +7,18 @@
*/
package org.dspace.content;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CollectionService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.*;
import org.dspace.authorize.AuthorizeException;
/**
* Class representing a collection.
@@ -34,6 +36,8 @@ import java.util.List;
*/
@Entity
@Table(name="collection")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Collection extends DSpaceObject implements DSpaceObjectLegacySupport
{
@@ -83,7 +87,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
joinColumns = {@JoinColumn(name = "collection_id") },
inverseJoinColumns = {@JoinColumn(name = "community_id") }
)
private final List<Community> communities = new ArrayList<>();
private Set<Community> communities = new HashSet<>();
@Transient
private transient CollectionService collectionService;
@@ -263,7 +267,11 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
*/
public List<Community> getCommunities() throws SQLException
{
return communities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = communities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCommunity(Community community) {
@@ -271,7 +279,7 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
setModified();
}
void removeCommunity(Community community){
void removeCommunity(Community community) {
this.communities.remove(community);
setModified();
}
@@ -328,9 +336,10 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
return Constants.COLLECTION;
}
public void setWorkflowGroup(int step, Group g)
public void setWorkflowGroup(Context context, int step, Group g)
throws SQLException, AuthorizeException
{
getCollectionService().setWorkflowGroup(this, step, g);
getCollectionService().setWorkflowGroup(context, this, step, g);
}
@Override
@@ -345,4 +354,4 @@ public class Collection extends DSpaceObject implements DSpaceObjectLegacySuppor
}
return collectionService;
}
}
}

View File

@@ -32,6 +32,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.sql.SQLException;
import java.util.*;
import org.dspace.authorize.service.ResourcePolicyService;
/**
* Service implementation for the Collection object.
@@ -51,6 +52,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
@Autowired(required = true)
protected AuthorizeService authorizeService;
@Autowired(required = true)
protected ResourcePolicyService resourcePolicyService;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
protected ItemService itemService;
@@ -334,30 +337,77 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
groupService.setName(g,
"COLLECTION_" + collection.getID() + "_WORKFLOW_STEP_" + step);
groupService.update(context, g);
setWorkflowGroup(collection, step, g);
setWorkflowGroup(context, collection, step, g);
authorizeService.addPolicy(context, collection, Constants.ADD, g);
}
return getWorkflowGroup(collection, step);
}
@Override
public void setWorkflowGroup(Collection collection, int step, Group group) {
public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
throws SQLException, AuthorizeException
{
// we need to store the old group to be able to revoke permissions if granted before
Group oldGroup = null;
int action;
switch (step)
{
case 1:
oldGroup = collection.getWorkflowStep1();
action = Constants.WORKFLOW_STEP_1;
collection.setWorkflowStep1(group);
break;
case 2:
oldGroup = collection.getWorkflowStep2();
action = Constants.WORKFLOW_STEP_2;
collection.setWorkflowStep2(group);
break;
case 3:
oldGroup = collection.getWorkflowStep3();
action = Constants.WORKFLOW_STEP_3;
collection.setWorkflowStep3(group);
break;
default:
throw new IllegalArgumentException("Illegal step count: " + step);
}
// deal with permissions.
try
{
context.turnOffAuthorisationSystem();
// remove the policies for the old group
if (oldGroup != null)
{
Iterator<ResourcePolicy> oldPolicies =
resourcePolicyService.find(context, collection, oldGroup, action).iterator();
while (oldPolicies.hasNext())
{
resourcePolicyService.delete(context, oldPolicies.next());
}
oldPolicies = resourcePolicyService.find(context, collection, oldGroup, Constants.ADD).iterator();
while (oldPolicies.hasNext())
{
ResourcePolicy rp = oldPolicies.next();
if (rp.getRpType() == ResourcePolicy.TYPE_WORKFLOW)
{
resourcePolicyService.delete(context, rp);
}
}
}
// group can be null to delete workflow step.
// we need to grant permissions if group is not null
if (group != null)
{
authorizeService.addPolicy(context, collection, action, group, ResourcePolicy.TYPE_WORKFLOW);
authorizeService.addPolicy(context, collection, Constants.ADD, group, ResourcePolicy.TYPE_WORKFLOW);
}
} finally {
context.restoreAuthSystemState();
}
collection.setModified();
}
@Override
@@ -749,8 +799,8 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl<Collection> i
while (owningCommunities.hasNext())
{
Community owningCommunity = owningCommunities.next();
owningCommunities.remove();
owningCommunity.getCollections().remove(collection);
collection.removeCommunity(owningCommunity);
owningCommunity.removeCollection(collection);
}
collectionDAO.delete(context, collection);

View File

@@ -9,10 +9,12 @@ package org.dspace.content;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.apache.log4j.Logger;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.core.*;
import org.dspace.eperson.Group;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
@@ -30,6 +32,8 @@ import java.util.*;
*/
@Entity
@Table(name="community")
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
{
/** log4j category */
@@ -44,13 +48,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "parent_comm_id") },
inverseJoinColumns = {@JoinColumn(name = "child_comm_id") }
)
private final List<Community> subCommunities = new ArrayList<>();
private Set<Community> subCommunities = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "subCommunities")
private List<Community> parentCommunities = new ArrayList<>();
private Set<Community> parentCommunities = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "communities", cascade = {CascadeType.PERSIST})
private final List<Collection> collections = new ArrayList<>();
private Set<Collection> collections = new HashSet<>();
@OneToOne
@JoinColumn(name = "admin")
@@ -85,13 +89,13 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
void addSubCommunity(Community subCommunity)
{
getSubcommunities().add(subCommunity);
subCommunities.add(subCommunity);
setModified();
}
void removeSubCommunity(Community subCommunity)
{
getSubcommunities().remove(subCommunity);
subCommunities.remove(subCommunity);
setModified();
}
@@ -140,17 +144,21 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}
void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}
/**
@@ -162,7 +170,11 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getSubcommunities()
{
return subCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = subCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
/**
@@ -173,16 +185,25 @@ public class Community extends DSpaceObject implements DSpaceObjectLegacySupport
*/
public List<Community> getParentCommunities()
{
return parentCommunities;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Community[] output = parentCommunities.toArray(new Community[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addParentCommunity(Community parentCommunity) {
getParentCommunities().add(parentCommunity);
parentCommunities.add(parentCommunity);
}
void clearParentCommunities(){
this.parentCommunities.clear();
this.parentCommunities = null;
parentCommunities.clear();
}
public void removeParentCommunity(Community parentCommunity)
{
parentCommunities.remove(parentCommunity);
setModified();
}
/**

View File

@@ -455,7 +455,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
rawDelete(context, childCommunity);
childCommunity.getParentCommunities().remove(parentCommunity);
childCommunity.removeParentCommunity(parentCommunity);
parentCommunity.removeSubCommunity(childCommunity);
log.info(LogManager.getHeader(context, "remove_subcommunity",
@@ -492,7 +492,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
Iterator<Community> subcommunities = community.getSubcommunities().iterator();
while (subcommunities.hasNext()) {
Community subCommunity = subcommunities.next();
subcommunities.remove();
community.removeSubCommunity(subCommunity);
delete(context, subCommunity);
}
// now let the parent remove the community
@@ -535,7 +535,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (collections.hasNext())
{
Collection collection = collections.next();
collections.remove();
community.removeCollection(collection);
removeCollection(context, community, collection);
}
// delete subcommunities
@@ -544,7 +544,7 @@ public class CommunityServiceImpl extends DSpaceObjectServiceImpl<Community> imp
while (subCommunities.hasNext())
{
Community subComm = subCommunities.next();
subCommunities.remove();
community.removeSubCommunity(subComm);
delete(context, subComm);
}

View File

@@ -7,17 +7,18 @@
*/
package org.dspace.content;
import org.dspace.content.comparator.NameAscendingComparator;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.hibernate.annotations.Sort;
import org.hibernate.annotations.SortType;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.*;
/**
* Class representing an item in DSpace.
@@ -78,7 +79,7 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
joinColumns = {@JoinColumn(name = "item_id") },
inverseJoinColumns = {@JoinColumn(name = "collection_id") }
)
private final List<Collection> collections = new ArrayList<>();
private final Set<Collection> collections = new HashSet<>();
@ManyToMany(fetch = FetchType.LAZY, mappedBy = "items")
private final List<Bundle> bundles = new ArrayList<>();
@@ -224,23 +225,31 @@ public class Item extends DSpaceObject implements DSpaceObjectLegacySupport
}
/**
* Get the collections this item is in. The order is indeterminate.
* Get the collections this item is in. The order is sorted ascending by collection name.
*
* @return the collections this item is in, if any.
*/
public List<Collection> getCollections()
{
return collections;
// We return a copy because we do not want people to add elements to this collection directly.
// We return a list to maintain backwards compatibility
Collection[] output = collections.toArray(new Collection[]{});
Arrays.sort(output, new NameAscendingComparator());
return Arrays.asList(output);
}
void addCollection(Collection collection)
{
getCollections().add(collection);
collections.add(collection);
}
void removeCollection(Collection collection)
{
getCollections().remove(collection);
collections.remove(collection);
}
public void clearCollections(){
collections.clear();
}
public Collection getTemplateItemOf() {

View File

@@ -651,7 +651,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
}
//Only clear collections after we have removed everything else from the item
item.getCollections().clear();
item.clearCollections();
item.setOwningCollection(null);
// Finally remove item row

View File

@@ -21,6 +21,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -98,7 +99,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
}
@Override
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
return metadataValueDAO.findByValueLike(context, value);
}

View File

@@ -12,7 +12,9 @@ import org.dspace.content.service.SiteService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import javax.persistence.Cacheable;
import javax.persistence.Entity;
import javax.persistence.Table;
import javax.persistence.Transient;
@@ -22,6 +24,8 @@ import javax.persistence.Transient;
* By default, the handle suffix "0" represents the Site, e.g. "1721.1/0"
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE)
@Table(name = "site")
public class Site extends DSpaceObject
{

View File

@@ -93,11 +93,8 @@ public class WorkspaceItemServiceImpl implements WorkspaceItemService {
Item item = itemService.create(context, workspaceItem);
item.setSubmitter(context.getCurrentUser());
// Now create the policies for the submitter and workflow
// users to modify item and contents
// Now create the policies for the submitter to modify item and contents
// contents = bitstreams, bundles
// FIXME: icky hardcoded workflow steps
workflowService.addInitialWorkspaceItemPolicies(context, workspaceItem);
// read permission
authorizeService.addPolicy(context, item, Constants.READ, item.getSubmitter(), ResourcePolicy.TYPE_SUBMISSION);
// write permission

View File

@@ -0,0 +1,39 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.content.comparator;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.dspace.content.DSpaceObject;
import java.util.Comparator;
/**
 * Orders DSpaceObjects ascending by name, ignoring case. Null objects sort
 * before non-null ones, and null/blank names are treated as the empty string.
 * Objects whose names are equal (ignoring case) are ordered by their UUID so
 * that the resulting order is always deterministic.
 */
public class NameAscendingComparator implements Comparator<DSpaceObject>{

    @Override
    public int compare(DSpaceObject dso1, DSpaceObject dso2) {
        if (dso1 == dso2){
            return 0;
        }else if (dso1 == null){
            // nulls first
            return -1;
        }else if (dso2 == null){
            return 1;
        }else {
            String name1 = StringUtils.trimToEmpty(dso1.getName());
            String name2 = StringUtils.trimToEmpty(dso2.getName());

            int byName = name1.compareToIgnoreCase(name2);
            // When two DSO's have the same name (ignoring case), use their UUID
            // to put them in a deterministic order. The previous exact-equality
            // check let case-variant names ("Abc" vs "abc") bypass the tie-break.
            if(byName == 0) {
                return ObjectUtils.compare(dso1.getID(), dso2.getID());
            } else {
                return byName;
            }
        }
    }
}

View File

@@ -13,6 +13,7 @@ import org.dspace.core.Context;
import org.dspace.core.GenericDAO;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -26,7 +27,7 @@ public interface MetadataValueDAO extends GenericDAO<MetadataValue> {
public List<MetadataValue> findByField(Context context, MetadataField fieldId) throws SQLException;
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;

View File

@@ -121,6 +121,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);
return list(criteria);
}
@@ -160,6 +162,8 @@ public class CollectionDAOImpl extends AbstractHibernateDSODAO<Collection> imple
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);
return list(hibernateQuery);

View File

@@ -91,6 +91,7 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
Query query = createQuery(context, queryBuilder.toString());
query.setParameter(sortField.toString(), sortField.getID());
query.setCacheable(true);
return findMany(context, query);
}
@@ -129,6 +130,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
Restrictions.eq("resourcePolicy.eperson", ePerson),
actionQuery
));
criteria.setCacheable(true);
return list(criteria);
}
@@ -164,6 +167,8 @@ public class CommunityDAOImpl extends AbstractHibernateDSODAO<Community> impleme
query.append(" AND rp.epersonGroup.id IN (select g.id from Group g where (from EPerson e where e.id = :eperson_id) in elements(epeople))");
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter("eperson_id", ePerson.getID());
hibernateQuery.setCacheable(true);
return list(hibernateQuery);
}

View File

@@ -104,7 +104,8 @@ public class ItemDAOImpl extends AbstractHibernateDSODAO<Item> implements ItemDA
addMetadataLeftJoin(query, Item.class.getSimpleName().toLowerCase(), Collections.singletonList(metadataField));
query.append(" WHERE item.inArchive = :in_archive");
query.append(" AND item.submitter =:submitter");
addMetadataSortQuery(query, Collections.singletonList(metadataField), null);
//submissions should sort in reverse by date by default
addMetadataSortQuery(query, Collections.singletonList(metadataField), null, Collections.singletonList("desc"));
Query hibernateQuery = createQuery(context, query.toString());
hibernateQuery.setParameter(metadataField.toString(), metadataField.getID());

View File

@@ -18,6 +18,7 @@ import org.hibernate.Query;
import org.hibernate.criterion.Restrictions;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -48,14 +49,14 @@ public class MetadataValueDAOImpl extends AbstractHibernateDAO<MetadataValue> im
}
@Override
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
Criteria criteria = createCriteria(context, MetadataValue.class);
criteria.add(
Restrictions.like("value", "%" + value + "%")
);
criteria.setFetchMode("metadataField", FetchMode.JOIN);
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException {
String queryString = "SELECT m FROM MetadataValue m JOIN m.metadataField f " +
"WHERE m.value like concat('%', concat(:searchString,'%')) ORDER BY m.id ASC";
return list(criteria);
Query query = createQuery(context, queryString);
query.setString("searchString", value);
return iterate(query);
}
@Override

View File

@@ -32,6 +32,7 @@ public class SiteDAOImpl extends AbstractHibernateDAO<Site> implements SiteDAO
@Override
public Site findSite(Context context) throws SQLException {
Criteria criteria = createCriteria(context, Site.class);
criteria.setCacheable(true);
return uniqueResult(criteria);
}
}

View File

@@ -161,7 +161,8 @@ public interface CollectionService extends DSpaceObjectService<Collection>, DSpa
* @param group
* the new workflow group, or <code>null</code>
*/
public void setWorkflowGroup(Collection collection, int step, Group group);
public void setWorkflowGroup(Context context, Collection collection, int step, Group group)
throws SQLException, AuthorizeException;
/**
* Get the the workflow group corresponding to a particular workflow step.

View File

@@ -15,6 +15,7 @@ import org.dspace.core.Context;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
/**
@@ -82,7 +83,7 @@ public interface MetadataValueService {
*/
public void delete(Context context, MetadataValue metadataValue) throws SQLException;
public List<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public Iterator<MetadataValue> findByValueLike(Context context, String value) throws SQLException;
public void deleteByMetadataField(Context context, MetadataField metadataField) throws SQLException;

View File

@@ -8,6 +8,7 @@
package org.dspace.core;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.ListUtils;
import org.apache.commons.lang3.StringUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.MetadataField;
@@ -92,7 +93,10 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
}
}
protected void addMetadataSortQuery(StringBuilder query, List<MetadataField> metadataSortFields, List<String> columnSortFields)
protected void addMetadataSortQuery(StringBuilder query, List<MetadataField> metadataSortFields, List<String> columnSortFields) {
addMetadataSortQuery(query, metadataSortFields, columnSortFields, ListUtils.EMPTY_LIST);
}
protected void addMetadataSortQuery(StringBuilder query, List<MetadataField> metadataSortFields, List<String> columnSortFields, List<String> direction)
{
if(CollectionUtils.isNotEmpty(metadataSortFields)){
@@ -100,6 +104,8 @@ public abstract class AbstractHibernateDSODAO<T extends DSpaceObject> extends Ab
for (int i = 0; i < metadataSortFields.size(); i++) {
MetadataField metadataField = metadataSortFields.get(i);
query.append("STR(").append(metadataField.toString()).append(".value)");
String dir = direction.size() > i ? " " + direction.get(i) : "";
query.append(dir);
if(i != metadataSortFields.size() -1)
{
query.append(",");

View File

@@ -8,6 +8,9 @@
package org.dspace.core;
import org.apache.log4j.Logger;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.DSpaceObject;
import org.dspace.core.exception.DatabaseSchemaValidationException;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.factory.EPersonServiceFactory;
@@ -44,9 +47,6 @@ public class Context
{
private static final Logger log = Logger.getLogger(Context.class);
/** option flags */
public static final short READ_ONLY = 0x01;
/** Current user - null means anonymous access */
private EPerson currentUser;
@@ -77,13 +77,22 @@ public class Context
/** Event dispatcher name */
private String dispName = null;
/** options */
private short options = 0;
/** Context mode */
private Mode mode = Mode.READ_WRITE;
/** Cache that is only used the context is in READ_ONLY mode */
private ContextReadOnlyCache readOnlyCache = new ContextReadOnlyCache();
protected EventService eventService;
private DBConnection dbConnection;
public enum Mode {
READ_ONLY,
READ_WRITE,
BATCH_EDIT
}
static
{
// Before initializing a Context object, we need to ensure the database
@@ -100,6 +109,7 @@ public class Context
}
protected Context(EventService eventService, DBConnection dbConnection) {
this.mode = Mode.READ_WRITE;
this.eventService = eventService;
this.dbConnection = dbConnection;
init();
@@ -112,18 +122,19 @@ public class Context
*/
public Context()
{
this.mode = Mode.READ_WRITE;
init();
}
/**
* Construct a new context object with passed options. A database connection is opened.
* Construct a new context object with the given mode enabled. A database connection is opened.
* No user is authenticated.
*
* @param options context operation flags
* @param mode The mode to use when opening the context.
*/
public Context(short options)
public Context(Mode mode)
{
this.options = options;
this.mode = mode;
init();
}
@@ -145,8 +156,18 @@ public class Context
dbConnection = new DSpace().getSingletonService(DBConnection.class);
if(dbConnection == null)
{
//It appears there is a problem with the database, run the Schema validator
DatabaseSchemaValidator schemaValidator = new DSpace().getSingletonService(DatabaseSchemaValidator.class);
String validationError = schemaValidator == null ? "null" : schemaValidator.getDatabaseSchemaValidationError();
String message = "The schema validator returned: " +
validationError;
log.fatal("Cannot obtain the bean which provides a database connection. " +
"Check previous entries in the dspace.log to find why the db failed to initialize.");
"Check previous entries in the dspace.log to find why the db failed to initialize. " + message);
//Fail fast
throw new DatabaseSchemaValidationException(message);
}
}
@@ -159,6 +180,7 @@ public class Context
authStateChangeHistory = new Stack<Boolean>();
authStateClassCallHistory = new Stack<String>();
setMode(this.mode);
}
/**
@@ -350,8 +372,13 @@ public class Context
try
{
// As long as we have a valid, writeable database connection,
// commit any changes made as part of the transaction
commit();
// rollback any changes if we are in read-only mode,
// otherwise, commit any changes made as part of the transaction
if(isReadOnly()) {
abort();
} else {
commit();
}
}
finally
{
@@ -381,13 +408,19 @@ public class Context
log.info("commit() was called on a closed Context object. No changes to commit.");
}
if(isReadOnly()) {
throw new UnsupportedOperationException("You cannot commit a read-only context");
}
// Our DB Connection (Hibernate) will decide if an actual commit is required or not
try
{
// As long as we have a valid, writeable database connection,
// commit any changes made as part of the transaction
if (isValid() && !isReadOnly())
if (isValid())
{
// Dispatch events before committing changes to the database,
// as the consumers may change something too
dispatchEvents();
}
@@ -515,7 +548,7 @@ public class Context
try
{
// Rollback if we have a database connection, and it is NOT Read Only
// Rollback ONLY if we have a database connection, and it is NOT Read Only
if (isValid() && !isReadOnly())
{
dbConnection.rollback();
@@ -564,7 +597,7 @@ public class Context
*/
public boolean isReadOnly()
{
return (options & READ_ONLY) > 0;
return mode != null && mode == Mode.READ_ONLY;
}
public void setSpecialGroup(UUID groupID)
@@ -632,7 +665,7 @@ public class Context
/**
* Returns the size of the cache of all object that have been read from the database so far. A larger number
* means that more memory is consumed by the cache. This also has a negative impact on the query performance. In
* that case you should consider clearing the cache (see {@link Context#clearCache() clearCache}).
that case you should consider uncaching entities when they are no longer needed (see {@link Context#uncacheEntity(ReloadableEntity) uncacheEntity}).
*
* @throws SQLException When connecting to the active cache fails.
*/
@@ -640,6 +673,59 @@ public class Context
return this.getDBConnection().getCacheSize();
}
/**
* Change the mode of this current context.
*
* BATCH_EDIT: Enabling batch edit mode means that the database connection is configured so that it is optimized to
* process a large number of records.
*
* READ_ONLY: READ_ONLY mode will tell the database we are not going to do any updates. This means it can disable
* optimizations for delaying or grouping updates.
*
* READ_WRITE: This is the default mode and enables the normal database behaviour. This behaviour is optimal for querying and updating a
* small number of records.
*
* @param newMode The mode to put this context in
*/
public void setMode(Mode newMode) {
    try {
        // Reconfigure the underlying database connection for the new mode:
        // setConnectionMode(batchOptimized, readOnlyOptimized)
        switch (newMode) {
            case BATCH_EDIT:
                dbConnection.setConnectionMode(true, false);
                break;
            case READ_ONLY:
                dbConnection.setConnectionMode(false, true);
                break;
            case READ_WRITE:
                dbConnection.setConnectionMode(false, false);
                break;
            default:
                // Fixed typo in the log message ("has nog been configured")
                log.warn("New context mode detected that has not been configured.");
                break;
        }
    } catch(SQLException ex) {
        // Mode switch is best-effort: keep going with the old connection settings
        log.warn("Unable to set database connection mode", ex);
    }

    // Always clear the cache, except when going from READ_ONLY to READ_ONLY,
    // where the cached read-only data is still consistent.
    if(mode != Mode.READ_ONLY || newMode != Mode.READ_ONLY) {
        // clear our read-only cache to prevent any inconsistencies
        readOnlyCache.clear();
    }

    // save the new mode
    mode = newMode;
}
/**
* The current database mode (READ_ONLY, READ_WRITE or BATCH_EDIT) of this context.
* See {@link Context#setMode(Mode)} for what each mode means.
*
* @return The current mode; this accessor does not change any state.
*/
public Mode getCurrentMode() {
return mode;
}
/**
* Enable or disable "batch processing mode" for this context.
*
@@ -652,16 +738,22 @@ public class Context
* @param batchModeEnabled When true, batch processing mode will be enabled. If false, it will be disabled.
* @throws SQLException When configuring the database connection fails.
*/
@Deprecated
public void enableBatchMode(boolean batchModeEnabled) throws SQLException {
dbConnection.setOptimizedForBatchProcessing(batchModeEnabled);
if(batchModeEnabled) {
setMode(Mode.BATCH_EDIT);
} else {
setMode(Mode.READ_WRITE);
}
}
/**
* Check if "batch processing mode" is enabled for this context.
* @return True if batch processing mode is enabled, false otherwise.
*/
@Deprecated
public boolean isBatchModeEnabled() {
return dbConnection.isOptimizedForBatchProcessing();
return mode != null && mode == Mode.BATCH_EDIT;
}
/**
@@ -690,7 +782,53 @@ public class Context
dbConnection.uncacheEntity(entity);
}
/**
 * Look up a previously cached authorization decision.
 *
 * @return the cached result, or null when this context is not read-only or
 *         when no decision has been cached for this key.
 */
public Boolean getCachedAuthorizationResult(DSpaceObject dspaceObject, int action, EPerson eperson) {
    // The authorization cache is only maintained in READ_ONLY mode.
    if (!isReadOnly()) {
        return null;
    }
    return readOnlyCache.getCachedAuthorizationResult(dspaceObject, action, eperson);
}
/**
 * Cache an authorization decision for the given object/action/eperson triple.
 * Only has an effect when this context is in READ_ONLY mode; otherwise the
 * call is a no-op.
 *
 * @param rp the resource policy that produced the decision; it is evicted from
 *           the session here, presumably because the cached Boolean makes the
 *           policy entity itself unnecessary in read-only mode — TODO confirm.
 */
public void cacheAuthorizedAction(DSpaceObject dspaceObject, int action, EPerson eperson, Boolean result, ResourcePolicy rp) {
if(isReadOnly()) {
readOnlyCache.cacheAuthorizedAction(dspaceObject, action, eperson, result);
// Eviction is best-effort: a failure only logs a warning, the cached result stays valid.
try {
uncacheEntity(rp);
} catch (SQLException e) {
log.warn("Unable to uncache a resource policy when in read-only mode", e);
}
}
}
/**
 * Look up a previously cached group-membership answer.
 *
 * @return the cached membership, or null when this context is not read-only
 *         or when nothing has been cached for this group/eperson pair.
 */
public Boolean getCachedGroupMembership(Group group, EPerson eperson) {
    return isReadOnly() ? readOnlyCache.getCachedGroupMembership(group, eperson) : null;
}
/**
 * Cache a single group-membership answer. No-op unless this context is in
 * READ_ONLY mode.
 */
public void cacheGroupMembership(Group group, EPerson eperson, Boolean isMember) {
    if (!isReadOnly()) {
        return;
    }
    readOnlyCache.cacheGroupMembership(group, eperson, isMember);
}
/**
 * Cache the complete set of groups the given EPerson belongs to. No-op unless
 * this context is in READ_ONLY mode.
 */
public void cacheAllMemberGroupsSet(EPerson ePerson, Set<Group> groups) {
    if (!isReadOnly()) {
        return;
    }
    readOnlyCache.cacheAllMemberGroupsSet(ePerson, groups);
}
/**
 * The complete cached set of groups for the given EPerson.
 *
 * @return the cached set, or null when this context is not read-only or the
 *         set has not been cached.
 */
public Set<Group> getCachedAllMemberGroupsSet(EPerson ePerson) {
    return isReadOnly() ? readOnlyCache.getCachedAllMemberGroupsSet(ePerson) : null;
}
/**
* Reload all entities related to this context.
* @throws SQLException When reloading one of the entities fails.

View File

@@ -0,0 +1,103 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.ImmutableTriple;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.dspace.content.DSpaceObject;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.springframework.util.CollectionUtils;
import java.util.HashMap;
import java.util.Set;
/**
 * Manages the read-only caches used by the Context class when it is in
 * READ_ONLY mode: authorization decisions, individual group memberships, and
 * the complete set of groups an EPerson belongs to.
 */
public class ContextReadOnlyCache {

    /**
     * Authorized actions cache that is used when the context is in READ_ONLY mode.
     * The key of the cache is: DSpace Object ID, action ID, Eperson ID.
     */
    private final HashMap<Triple<String, Integer, String>, Boolean> authorizedActionsCache = new HashMap<>();

    /**
     * Group membership cache that is used when the context is in READ_ONLY mode.
     * The key of the cache is: Group Name, Eperson ID.
     */
    private final HashMap<Pair<String, String>, Boolean> groupMembershipCache = new HashMap<>();

    /**
     * Cache for all the groups the current ePerson is a member of when the context is in READ_ONLY mode.
     */
    private final HashMap<String, Set<Group>> allMemberGroupsCache = new HashMap<>();

    /** Look up a cached authorization decision; null means "not cached". */
    public Boolean getCachedAuthorizationResult(DSpaceObject dspaceObject, int action, EPerson eperson) {
        Triple<String, Integer, String> key = buildAuthorizedActionKey(dspaceObject, action, eperson);
        return authorizedActionsCache.get(key);
    }

    /** Record an authorization decision for later lookups. */
    public void cacheAuthorizedAction(DSpaceObject dspaceObject, int action, EPerson eperson, Boolean result) {
        Triple<String, Integer, String> key = buildAuthorizedActionKey(dspaceObject, action, eperson);
        authorizedActionsCache.put(key, result);
    }

    /**
     * Look up a cached group membership. When the full membership set for the
     * EPerson is known and non-empty, answer from it; otherwise fall back to
     * the per-pair cache (which may return null for a miss).
     */
    public Boolean getCachedGroupMembership(Group group, EPerson eperson) {
        Set<Group> completeMemberships = allMemberGroupsCache.get(buildAllMembersGroupKey(eperson));
        if (completeMemberships == null || completeMemberships.isEmpty()) {
            return groupMembershipCache.get(buildGroupMembershipKey(group, eperson));
        }
        return completeMemberships.contains(group);
    }

    /** Record a single membership, unless the full membership set is already cached. */
    public void cacheGroupMembership(Group group, EPerson eperson, Boolean isMember) {
        Set<Group> completeMemberships = allMemberGroupsCache.get(buildAllMembersGroupKey(eperson));
        if (completeMemberships == null || completeMemberships.isEmpty()) {
            groupMembershipCache.put(buildGroupMembershipKey(group, eperson), isMember);
        }
    }

    /** Record the complete set of groups the EPerson belongs to. */
    public void cacheAllMemberGroupsSet(EPerson ePerson, Set<Group> groups) {
        allMemberGroupsCache.put(buildAllMembersGroupKey(ePerson), groups);

        //clear the individual groupMembershipCache as we have all memberships now.
        groupMembershipCache.clear();
    }

    /** The complete cached group set for the EPerson, or null when not cached. */
    public Set<Group> getCachedAllMemberGroupsSet(EPerson ePerson) {
        return allMemberGroupsCache.get(buildAllMembersGroupKey(ePerson));
    }

    /** Drop every cached entry from all three caches. */
    public void clear() {
        authorizedActionsCache.clear();
        groupMembershipCache.clear();
        allMemberGroupsCache.clear();
    }

    // A null EPerson (anonymous access) maps to the empty-string key.
    private String buildAllMembersGroupKey(EPerson ePerson) {
        return ePerson == null ? "" : ePerson.getID().toString();
    }

    private ImmutableTriple<String, Integer, String> buildAuthorizedActionKey(DSpaceObject dspaceObject, int action, EPerson eperson) {
        String objectKey = dspaceObject == null ? "" : dspaceObject.getID().toString();
        String epersonKey = eperson == null ? "" : eperson.getID().toString();
        return new ImmutableTriple<>(objectKey, Integer.valueOf(action), epersonKey);
    }

    private Pair<String, String> buildGroupMembershipKey(Group group, EPerson eperson) {
        String groupKey = group == null ? "" : group.getName();
        String epersonKey = eperson == null ? "" : eperson.getID().toString();
        return new ImmutablePair<>(groupKey, epersonKey);
    }
}

View File

@@ -40,7 +40,7 @@ public interface DBConnection<T> {
public DatabaseConfigVO getDatabaseConfig() throws SQLException;
public void setOptimizedForBatchProcessing(boolean batchOptimized) throws SQLException;
public void setConnectionMode(boolean batchOptimized, boolean readOnlyOptimized) throws SQLException;
public boolean isOptimizedForBatchProcessing();

View File

@@ -0,0 +1,17 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;
/**
 * Interface to validate the current domain model against the database schema.
 */
public interface DatabaseSchemaValidator {

/**
 * Validate the domain model against the database schema.
 *
 * @return a description of the validation problem, or an empty string when
 *         validation passed (see HibernateDatabaseSchemaValidator).
 */
String getDatabaseSchemaValidationError();
}

View File

@@ -7,12 +7,11 @@
*/
package org.dspace.core;
import org.dspace.content.DSpaceObject;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.content.*;
import org.dspace.handle.Handle;
import org.dspace.storage.rdbms.DatabaseConfigVO;
import org.hibernate.FlushMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.Transaction;
import org.hibernate.*;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.proxy.HibernateProxyHelper;
import org.springframework.beans.factory.annotation.Autowired;
@@ -20,7 +19,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.orm.hibernate4.SessionFactoryUtils;
import javax.sql.DataSource;
import java.io.Serializable;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
@@ -37,12 +35,13 @@ public class HibernateDBConnection implements DBConnection<Session> {
private SessionFactory sessionFactory;
private boolean batchModeEnabled = false;
private boolean readOnlyEnabled = false;
@Override
public Session getSession() throws SQLException {
if(!isTransActionAlive()){
sessionFactory.getCurrentSession().beginTransaction();
configureBatchMode();
configureDatabaseMode();
}
return sessionFactory.getCurrentSession();
}
@@ -135,9 +134,10 @@ public class HibernateDBConnection implements DBConnection<Session> {
}
@Override
public void setOptimizedForBatchProcessing(final boolean batchOptimized) throws SQLException {
public void setConnectionMode(final boolean batchOptimized, final boolean readOnlyOptimized) throws SQLException {
this.batchModeEnabled = batchOptimized;
configureBatchMode();
this.readOnlyEnabled = readOnlyOptimized;
configureDatabaseMode();
}
@Override
@@ -145,9 +145,11 @@ public class HibernateDBConnection implements DBConnection<Session> {
return batchModeEnabled;
}
private void configureBatchMode() throws SQLException {
private void configureDatabaseMode() throws SQLException {
if(batchModeEnabled) {
getSession().setFlushMode(FlushMode.ALWAYS);
} else if(readOnlyEnabled) {
getSession().setFlushMode(FlushMode.MANUAL);
} else {
getSession().setFlushMode(FlushMode.AUTO);
}
@@ -162,6 +164,86 @@ public class HibernateDBConnection implements DBConnection<Session> {
*/
@Override
public <E extends ReloadableEntity> void uncacheEntity(E entity) throws SQLException {
getSession().evict(entity);
if(entity != null) {
if (entity instanceof DSpaceObject) {
DSpaceObject dso = (DSpaceObject) entity;
// The metadatavalue relation has CascadeType.ALL, so they are evicted automatically
// and we don't need to uncache the values explicitly.
if(Hibernate.isInitialized(dso.getHandles())) {
for (Handle handle : Utils.emptyIfNull(dso.getHandles())) {
uncacheEntity(handle);
}
}
if(Hibernate.isInitialized(dso.getResourcePolicies())) {
for (ResourcePolicy policy : Utils.emptyIfNull(dso.getResourcePolicies())) {
uncacheEntity(policy);
}
}
}
// ITEM
if (entity instanceof Item) {
Item item = (Item) entity;
//DO NOT uncache the submitter. This could be the current eperson. Uncaching could lead to
//LazyInitializationExceptions (see DS-3648)
if(Hibernate.isInitialized(item.getBundles())) {
for (Bundle bundle : Utils.emptyIfNull(item.getBundles())) {
uncacheEntity(bundle);
}
}
// BUNDLE
} else if (entity instanceof Bundle) {
Bundle bundle = (Bundle) entity;
if(Hibernate.isInitialized(bundle.getBitstreams())) {
for (Bitstream bitstream : Utils.emptyIfNull(bundle.getBitstreams())) {
uncacheEntity(bitstream);
}
}
// BITSTREAM
// No specific child entities to decache
// COMMUNITY
} else if (entity instanceof Community) {
Community community = (Community) entity;
// We don't uncache groups as they might still be referenced from the Context object
if(Hibernate.isInitialized(community.getLogo())) {
uncacheEntity(community.getLogo());
}
// COLLECTION
} else if (entity instanceof Collection) {
Collection collection = (Collection) entity;
//We don't uncache groups as they might still be referenced from the Context object
if(Hibernate.isInitialized(collection.getLogo())) {
uncacheEntity(collection.getLogo());
}
if(Hibernate.isInitialized(collection.getTemplateItem())) {
uncacheEntity(collection.getTemplateItem());
}
}
// Unless this object exists in the session, we won't do anything
if(getSession().contains(entity)) {
// If our Session has unsaved changes (dirty) and not READ-ONLY
if(!readOnlyEnabled && getSession().isDirty()) {
// write changes to database (don't worry if transaction fails, flushed changes will be rolled back)
getSession().flush();
}
// Remove object from Session
getSession().evict(entity);
}
}
}
}
}

View File

@@ -0,0 +1,38 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core;
import org.hibernate.HibernateException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.orm.hibernate4.LocalSessionFactoryBean;
/**
* Database schema validation when using the Hibernate persistence layer
*/
public class HibernateDatabaseSchemaValidator implements DatabaseSchemaValidator {
@Autowired
private ApplicationContext applicationContext;
public String getDatabaseSchemaValidationError() {
String validationError = "";
try {
applicationContext.getBean(LocalSessionFactoryBean.class);
} catch (org.springframework.beans.factory.BeanCreationException ex) {
//The hibernate validation exception is the cause of this BeanCreationException
validationError = ex.getCause() == null ? ex.getMessage() : ex.getCause().getMessage();
} catch (HibernateException ex) {
validationError = ex.getMessage();
}
return validationError;
}
}

View File

@@ -16,16 +16,13 @@ import java.math.BigInteger;
import java.rmi.dgc.VMID;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Random;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Date;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.text.SimpleDateFormat;
import java.text.ParseException;
import com.coverity.security.Escape;
import org.apache.commons.collections.CollectionUtils;
import org.apache.log4j.Logger;
/**
@@ -413,4 +410,8 @@ public final class Utils
int rl = result.length();
return result.substring(0, rl-2) + ":" + result.substring(rl-2);
}
/**
 * Null-safe view of a collection: the collection itself when non-null,
 * otherwise an immutable empty list.
 *
 * @param collection the collection to guard, may be null
 * @return never null
 */
public static <E> Collection<E> emptyIfNull(Collection<E> collection) {
    if (collection != null) {
        return collection;
    }
    return Collections.emptyList();
}
}

View File

@@ -0,0 +1,18 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.core.exception;
/**
 * Runtime exception that indicates that there is a database schema validation problem.
 */
public class DatabaseSchemaValidationException extends RuntimeException {

    /**
     * @param message description of the schema validation problem
     */
    public DatabaseSchemaValidationException(final String message) {
        super(message);
    }

    /**
     * Backward-compatible addition: keeps the underlying exception attached
     * instead of forcing callers to discard the cause.
     *
     * @param message description of the schema validation problem
     * @param cause the exception that revealed the problem
     */
    public DatabaseSchemaValidationException(final String message, final Throwable cause) {
        super(message, cause);
    }
}

View File

@@ -19,8 +19,6 @@ import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
@@ -28,6 +26,8 @@ import org.dspace.content.service.BitstreamService;
import org.dspace.curate.AbstractCurationTask;
import org.dspace.curate.Curator;
import org.dspace.curate.Suspendable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** ClamScan.java
*
@@ -55,7 +55,7 @@ public class ClamScan extends AbstractCurationTask
protected final String SCAN_FAIL_MESSAGE = "Error encountered using virus service - check setup";
protected final String NEW_ITEM_HANDLE = "in workflow";
private static Logger log = Logger.getLogger(ClamScan.class);
private static final Logger log = LoggerFactory.getLogger(ClamScan.class);
protected String host = null;
protected int port = 0;
@@ -231,18 +231,18 @@ public class ClamScan extends AbstractCurationTask
}
}
/** scan
*
/** A buffer to hold chunks of an input stream to be scanned for viruses. */
final byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];
/**
* Issue the INSTREAM command and return the response to
* and from the clamav daemon
* and from the clamav daemon.
*
* @param the bitstream for reporting results
* @param the InputStream to read
* @param the item handle for reporting results
* @param bitstream the bitstream for reporting results
* @param inputstream the InputStream to read
* @param itemHandle the item handle for reporting results
* @return a ScanResult representing the server response
* @throws IOException if IO error
*/
final byte[] buffer = new byte[DEFAULT_CHUNK_SIZE];;
protected int scan(Bitstream bitstream, InputStream inputstream, String itemHandle)
{
try
@@ -251,7 +251,7 @@ public class ClamScan extends AbstractCurationTask
}
catch (IOException e)
{
log.error("Error writing INSTREAM command . . .");
log.error("Error writing INSTREAM command", e);
return Curator.CURATE_ERROR;
}
int read = DEFAULT_CHUNK_SIZE;
@@ -263,7 +263,7 @@ public class ClamScan extends AbstractCurationTask
}
catch (IOException e)
{
log.error("Failed attempting to read the InputStream . . . ");
log.error("Failed attempting to read the InputStream", e);
return Curator.CURATE_ERROR;
}
if (read == -1)
@@ -277,7 +277,7 @@ public class ClamScan extends AbstractCurationTask
}
catch (IOException e)
{
log.error("Could not write to the socket . . . ");
log.error("Could not write to the socket", e);
return Curator.CURATE_ERROR;
}
}
@@ -288,7 +288,7 @@ public class ClamScan extends AbstractCurationTask
}
catch (IOException e)
{
log.error("Error writing zero-length chunk to socket") ;
log.error("Error writing zero-length chunk to socket", e) ;
return Curator.CURATE_ERROR;
}
try
@@ -298,7 +298,7 @@ public class ClamScan extends AbstractCurationTask
}
catch (IOException e)
{
log.error( "Error reading result from socket");
log.error( "Error reading result from socket", e);
return Curator.CURATE_ERROR;
}
@@ -306,7 +306,7 @@ public class ClamScan extends AbstractCurationTask
{
String response = new String(buffer, 0, read);
logDebugMessage("Response: " + response);
if (response.indexOf("FOUND") != -1)
if (response.contains("FOUND"))
{
String itemMsg = "item - " + itemHandle + ": ";
String bsMsg = "bitstream - " + bitstream.getName() +

View File

@@ -7,16 +7,7 @@
*/
package org.dspace.curate;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.core.Context;
import org.dspace.core.factory.CoreServiceFactory;
@@ -25,6 +16,10 @@ import org.dspace.eperson.EPerson;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Iterator;
/**
* CurationCli provides command-line access to Curation tools and processes.
*
@@ -142,7 +137,7 @@ public class CurationCli
}
EPersonService ePersonService = EPersonServiceFactory.getInstance().getEPersonService();
Context c = new Context();
Context c = new Context(Context.Mode.BATCH_EDIT);
if (ePersonName != null)
{
EPerson ePerson = ePersonService.findByEmail(c, ePersonName);

View File

@@ -52,12 +52,12 @@ public class Curator
// transaction scopes
public static enum TxScope { OBJECT, CURATION, OPEN };
private static Logger log = Logger.getLogger(Curator.class);
private static final Logger log = Logger.getLogger(Curator.class);
protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<Context>();
protected static final ThreadLocal<Context> curationCtx = new ThreadLocal<>();
protected Map<String, TaskRunner> trMap = new HashMap<String, TaskRunner>();
protected List<String> perfList = new ArrayList<String>();
protected Map<String, TaskRunner> trMap = new HashMap<>();
protected List<String> perfList = new ArrayList<>();
protected TaskQueue taskQ = null;
protected String reporter = null;
protected Invoked iMode = null;
@@ -177,8 +177,12 @@ public class Curator
* Performs all configured tasks upon object identified by id. If
* the object can be resolved as a handle, the DSO will be the
* target object.
*
* @param c a Dpace context
*
* <p>
* Note: this method has the side-effect of setting this instance's Context
* reference. The setting is retained on return.
*
* @param c a DSpace context
* @param id an object identifier
* @throws IOException if IO error
*/
@@ -230,9 +234,10 @@ public class Curator
* <P>
* Note: Site-wide tasks will default to running as
* an Anonymous User unless you call the Site-wide task
* via the 'curate(Context,String)' method with an
* via the {@link #curate(Context, String)} or
* {@link #curate(Context, DSpaceObject)} method with an
* authenticated Context object.
*
*
* @param dso the DSpace object
* @throws IOException if IO error
*/
@@ -265,7 +270,26 @@ public class Curator
}
}
}
/**
 * Performs all configured tasks upon DSpace object
 * (Community, Collection or Item).
 *
 * <p>
 * Note: this method has the side-effect of setting this instance's Context
 * reference. The setting is retained on return.
 *
 * @param c session context in which curation takes place.
 * @param dso the single object to be curated.
 * @throws java.io.IOException passed through.
 */
public void curate(Context c, DSpaceObject dso)
    throws IOException
{
    // Publish the caller's Context via the thread-local so that tasks run
    // by curate(DSpaceObject) can retrieve it (e.g. via curationContext()).
    // The value is deliberately NOT cleared afterwards — see the note above.
    curationCtx.set(c);
    curate(dso);
}
/**
* Places a curation request for the object identified by id on a
* managed queue named by the queueId.
@@ -483,10 +507,14 @@ public class Curator
{
return false;
}
Iterator<Item> iter = itemService.findByCollection(curationContext(), coll);
Context context = curationContext();
Iterator<Item> iter = itemService.findByCollection(context, coll);
while (iter.hasNext())
{
if (! tr.run(iter.next()))
Item item = iter.next();
boolean shouldContinue = tr.run(item);
context.uncacheEntity(item);
if (!shouldContinue)
{
return false;
}

View File

@@ -1,81 +0,0 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import org.apache.log4j.Logger;
import org.apache.solr.common.util.ContentStreamBase;
import org.dspace.content.Bitstream;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
/**
 * Presents a DSpace {@code Bitstream} to Solr as a {@code ContentStream}.
 */
public class BitstreamContentStream extends ContentStreamBase
{
    private static final Logger log = Logger.getLogger(BitstreamContentStream.class);

    /** Session used when the bitstream content is retrieved. */
    protected final Context context;

    /** The bitstream whose content is streamed. */
    protected final Bitstream file;

    protected BitstreamService bitstreamService;

    /**
     * @param context session used when reading the bitstream.
     * @param f the bitstream to wrap.
     * @throws SQLException if the bitstream's format cannot be read.
     */
    public BitstreamContentStream(Context context, Bitstream f ) throws SQLException {
        file = f;
        this.context = context;
        contentType = f.getFormat(context).getMIMEType();
        name = file.getName();
        size = file.getSize();
        sourceInfo = file.getName();
        bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
    }

    /**
     * Return the MIME type. When the stored format supplied none, sniff the
     * first byte of the content ('&lt;' =&gt; XML, '{' =&gt; JSON) and cache
     * the answer so repeated calls do not re-open and re-read the bitstream
     * (the original implementation returned without caching, re-reading on
     * every call).
     */
    @Override
    public String getContentType() {
        if(contentType==null) {
            // try-with-resources guarantees the sniff stream is closed.
            // NOTE(review): close failures are no longer separately logged.
            try (InputStream stream = bitstreamService.retrieve(context, file)) {
                char first = (char)stream.read();
                if(first == '<') {
                    contentType = "application/xml";
                } else if(first == '{') {
                    contentType = "application/json";
                }
            } catch(Exception ex) {
                log.error("Error determining content type for bitstream:" + file.getID(), ex);
            }
        }
        return contentType;
    }

    /**
     * @return the bitstream content; on failure, a stream containing the
     *         error text so callers degrade instead of aborting.
     */
    @Override
    public InputStream getStream() throws IOException {
        try {
            return bitstreamService.retrieve(context, file);
        } catch (Exception e) {
            log.error(e.getMessage(),e);
            return new ByteArrayInputStream(e.getMessage().getBytes(StandardCharsets.UTF_8));
        }
    }
}

View File

@@ -0,0 +1,212 @@
/**
* The contents of this file are subject to the license and copyright
* detailed in the LICENSE and NOTICE files at the root of the source
* tree and available online at
*
* http://www.dspace.org/license/
*/
package org.dspace.discovery;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.io.Charsets;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.apache.solr.common.util.ContentStreamBase;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Bitstream;
import org.dspace.content.BitstreamFormat;
import org.dspace.content.Bundle;
import org.dspace.content.Item;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.BitstreamService;
import org.dspace.core.Context;
import javax.annotation.Nullable;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import static org.dspace.core.Utils.emptyIfNull;
/**
* Construct a <code>ContentStream</code> from a <code>File</code>
*/
public class FullTextContentStreams extends ContentStreamBase
{
private static final Logger log = Logger.getLogger(FullTextContentStreams.class);
public static final String FULLTEXT_BUNDLE = "TEXT";
protected final Context context;
protected List<FullTextBitstream> fullTextStreams;
protected BitstreamService bitstreamService;
public FullTextContentStreams(Context context, Item parentItem) throws SQLException {
this.context = context;
init(parentItem);
}
protected void init(Item parentItem) {
fullTextStreams = new LinkedList<>();
if(parentItem != null) {
sourceInfo = parentItem.getHandle();
//extracted full text is always extracted as plain text
contentType = "text/plain";
buildFullTextList(parentItem);
}
}
private void buildFullTextList(Item parentItem) {
// now get full text of any bitstreams in the TEXT bundle
// trundle through the bundles
List<Bundle> myBundles = parentItem.getBundles();
for (Bundle myBundle : emptyIfNull(myBundles)) {
if (StringUtils.equals(FULLTEXT_BUNDLE, myBundle.getName())) {
// a-ha! grab the text out of the bitstreams
List<Bitstream> bitstreams = myBundle.getBitstreams();
for (Bitstream fulltextBitstream : emptyIfNull(bitstreams)) {
fullTextStreams.add(new FullTextBitstream(sourceInfo, fulltextBitstream));
log.debug("Added BitStream: "
+ fulltextBitstream.getStoreNumber() + " "
+ fulltextBitstream.getSequenceID() + " "
+ fulltextBitstream.getName());
}
}
}
}
@Override
public String getName() {
return StringUtils.join(Iterables.transform(fullTextStreams, new Function<FullTextBitstream, String>() {
@Nullable
@Override
public String apply(@Nullable FullTextBitstream input) {
return input == null ? "" : input.getFileName();
}
}), ";");
}
@Override
public Long getSize() {
long result = 0;
if(CollectionUtils.isNotEmpty(fullTextStreams)) {
Iterable<Long> individualSizes = Iterables.transform(fullTextStreams, new Function<FullTextBitstream, Long>() {
@Nullable
@Override
public Long apply(@Nullable FullTextBitstream input) {
return input == null ? 0L : input.getSize();
}
});
for (Long size : individualSizes) {
result += size;
}
}
return result;
}
@Override
public Reader getReader() throws IOException {
return super.getReader();
}
@Override
public InputStream getStream() throws IOException {
try {
return new SequenceInputStream(new FullTextEnumeration(fullTextStreams.iterator()));
} catch (Exception e) {
log.error("Unable to add full text bitstreams to SOLR for item " + sourceInfo + ": " + e.getMessage(), e);
return new ByteArrayInputStream((e.getClass() + ": " + e.getMessage()).getBytes(StandardCharsets.UTF_8));
}
}
public boolean isEmpty() {
return CollectionUtils.isEmpty(fullTextStreams);
}
private BitstreamService getBitstreamService() {
if(bitstreamService == null) {
bitstreamService = ContentServiceFactory.getInstance().getBitstreamService();
}
return bitstreamService;
}
private class FullTextBitstream {
private String itemHandle;
private Bitstream bitstream;
public FullTextBitstream(final String parentHandle, final Bitstream file) {
this.itemHandle = parentHandle;
this.bitstream = file;
}
public String getContentType(final Context context) throws SQLException {
BitstreamFormat format = bitstream.getFormat(context);
return format == null ? null : StringUtils.trimToEmpty(format.getMIMEType());
}
public String getFileName() {
return StringUtils.trimToEmpty(bitstream.getName());
}
public long getSize() {
return bitstream.getSize();
}
public InputStream getInputStream() throws SQLException, IOException, AuthorizeException {
return getBitstreamService().retrieve(context, bitstream);
}
public String getItemHandle() {
return itemHandle;
}
}
private class FullTextEnumeration implements Enumeration<InputStream> {
private final Iterator<FullTextBitstream> fulltextIterator;
public FullTextEnumeration(final Iterator<FullTextBitstream> fulltextStreams) {
this.fulltextIterator = fulltextStreams;
}
public boolean hasMoreElements() {
return fulltextIterator.hasNext();
}
public InputStream nextElement() {
InputStream inputStream = null;
FullTextBitstream bitstream = null;
try {
bitstream = fulltextIterator.next();
inputStream = bitstream.getInputStream();
} catch (Exception e) {
log.warn("Unable to add full text bitstream " + (bitstream == null ? "NULL" :
bitstream.getFileName() + " for item " + bitstream.getItemHandle())
+ " to SOLR:" + e.getMessage(), e);
inputStream = new ByteArrayInputStream((e.getClass() + ": " + e.getMessage()).getBytes(StandardCharsets.UTF_8));
}
return inputStream == null ? null : new SequenceInputStream(
new ByteArrayInputStream("\n".getBytes(Charsets.UTF_8)), inputStream);
}
}
}

View File

@@ -47,7 +47,7 @@ public class IndexClient {
*/
public static void main(String[] args) throws SQLException, IOException, SearchServiceException {
Context context = new Context();
Context context = new Context(Context.Mode.READ_ONLY);
context.turnOffAuthorisationSystem();
String usage = "org.dspace.discovery.IndexClient [-cbhf] | [-r <handle>] | [-i <handle>] or nothing to update/clean an existing index.";
@@ -142,8 +142,6 @@ public class IndexClient {
throw new IllegalArgumentException("Cannot resolve " + handle + " to a DSpace object");
}
log.info("Forcibly Indexing " + handle);
// Enable batch mode; we may be indexing a large number of items
context.enableBatchMode(true);
final long startTimeMillis = System.currentTimeMillis();
final long count = indexAll(indexer, ContentServiceFactory.getInstance().getItemService(), context, dso);
final long seconds = (System.currentTimeMillis() - startTimeMillis ) / 1000;

View File

@@ -160,11 +160,12 @@ public class IndexEventConsumer implements Consumer {
if (objectsToUpdate != null && handlesToDelete != null) {
// update the changed Items not deleted because they were on create list
for (DSpaceObject iu : objectsToUpdate) {
for (DSpaceObject o : objectsToUpdate) {
/* we let all types through here and
* allow the search indexer to make
* decisions on indexing and/or removal
*/
DSpaceObject iu = ctx.reloadEntity(o);
String hdl = iu.getHandle();
if (hdl != null && !handlesToDelete.contains(hdl)) {
try {

View File

@@ -761,23 +761,22 @@ public class SolrServiceImpl implements SearchService, IndexingService {
/**
* Write the document to the index under the appropriate handle.
*
* @param doc the solr document to be written to the server
* @param doc
* the solr document to be written to the server
* @param streams
* @throws IOException IO exception
* list of bitstream content streams
* @throws IOException
* A general class of exceptions produced by failed or interrupted I/O operations.
*/
protected void writeDocument(SolrInputDocument doc, List<BitstreamContentStream> streams) throws IOException {
protected void writeDocument(SolrInputDocument doc, FullTextContentStreams streams) throws IOException {
try {
if(getSolr() != null)
{
if(CollectionUtils.isNotEmpty(streams))
if (streams != null && !streams.isEmpty())
{
ContentStreamUpdateRequest req = new ContentStreamUpdateRequest("/update/extract");
for(BitstreamContentStream bce : streams)
{
req.addContentStream(bce);
}
req.addContentStream(streams);
ModifiableSolrParams params = new ModifiableSolrParams();
@@ -1410,48 +1409,6 @@ public class SolrServiceImpl implements SearchService, IndexingService {
log.debug(" Added Grouping");
List<BitstreamContentStream> streams = new ArrayList<BitstreamContentStream>();
try {
// now get full text of any bitstreams in the TEXT bundle
// trundle through the bundles
List<Bundle> myBundles = item.getBundles();
for (Bundle myBundle : myBundles)
{
if ((myBundle.getName() != null)
&& myBundle.getName().equals("TEXT"))
{
// a-ha! grab the text out of the bitstreams
List<Bitstream> bitstreams = myBundle.getBitstreams();
for (Bitstream myBitstream : bitstreams)
{
try {
streams.add(new BitstreamContentStream(context, myBitstream));
log.debug(" Added BitStream: "
+ myBitstream.getStoreNumber() + " "
+ myBitstream.getSequenceID() + " "
+ myBitstream.getName());
} catch (Exception e)
{
// this will never happen, but compiler is now
// happy.
log.trace(e.getMessage(), e);
}
}
}
}
} catch (RuntimeException e)
{
log.error(e.getMessage(), e);
}
//Do any additional indexing, depends on the plugins
List<SolrServiceIndexPlugin> solrServiceIndexPlugins = DSpaceServicesFactory.getInstance().getServiceManager().getServicesByType(SolrServiceIndexPlugin.class);
for (SolrServiceIndexPlugin solrServiceIndexPlugin : solrServiceIndexPlugins)
@@ -1461,7 +1418,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
// write the index and close the inputstreamreaders
try {
writeDocument(doc, streams);
writeDocument(doc, new FullTextContentStreams(context, item));
log.info("Wrote Item: " + handle + " to Index");
} catch (RuntimeException e)
{

View File

@@ -8,32 +8,27 @@
package org.dspace.discovery;
import org.apache.commons.collections.CollectionUtils;
import org.apache.log4j.Logger;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.common.SolrInputDocument;
import org.dspace.authorize.ResourcePolicy;
import org.dspace.authorize.service.AuthorizeService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.DSpaceObject;
import org.dspace.content.service.CollectionService;
import org.dspace.content.service.CommunityService;
import org.dspace.authorize.service.ResourcePolicyService;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.LogManager;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import org.dspace.eperson.service.GroupService;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.springframework.beans.factory.annotation.Autowired;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.UUID;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.util.Set;
/**
* Restriction plugin that ensures that indexes all the resource policies.
@@ -74,6 +69,9 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu
}
document.addField("read", fieldValue);
//remove the policy from the cache to save memory
context.uncacheEntity(resourcePolicy);
}
} catch (SQLException e) {
log.error(LogManager.getHeader(context, "Error while indexing resource policies", "DSpace object: (id " + dso.getID() + " type " + dso.getType() + ")"));
@@ -98,7 +96,7 @@ public class SolrServiceResourceRestrictionPlugin implements SolrServiceIndexPlu
}
//Retrieve all the groups the current user is a member of !
List<Group> groups = groupService.allMemberGroups(context, currentUser);
Set<Group> groups = groupService.allMemberGroupsSet(context, currentUser);
for (Group group : groups) {
resourceQuery.append(" OR g").append(group.getID());
}

View File

@@ -114,7 +114,7 @@ public class EmbargoCLITool {
Context context = null;
try
{
context = new Context();
context = new Context(Context.Mode.BATCH_EDIT);
context.turnOffAuthorisationSystem();
Date now = new Date();
@@ -148,10 +148,12 @@ public class EmbargoCLITool {
Iterator<Item> ii = embargoService.findItemsByLiftMetadata(context);
while (ii.hasNext())
{
if (processOneItem(context, ii.next(), line, now))
Item item = ii.next();
if (processOneItem(context, item, line, now))
{
status = 1;
}
context.uncacheEntity(item);
}
}
context.complete();

View File

@@ -138,13 +138,10 @@ public class EmbargoServiceImpl implements EmbargoService
+ result.toString());
}
// sanity check: do not allow an embargo lift date in the past.
if (liftDate.before(new Date()))
{
throw new IllegalArgumentException(
"Embargo lift date must be in the future, but this is in the past: "
+ result.toString());
}
/*
* NOTE: We do not check here for past dates as it can result in errors during AIP restoration.
* Therefore, UIs should perform any such date validation on input. See DS-3348
*/
return result;
}

View File

@@ -16,6 +16,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.EPersonService;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
@@ -31,6 +32,8 @@ import java.util.List;
* @version $Revision$
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
@Table(name = "eperson")
public class EPerson extends DSpaceObject implements DSpaceObjectLegacySupport
{

View File

@@ -16,6 +16,7 @@ import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.eperson.factory.EPersonServiceFactory;
import org.dspace.eperson.service.GroupService;
import org.hibernate.annotations.CacheConcurrencyStrategy;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
@@ -29,6 +30,8 @@ import java.util.List;
* @author David Stuve
*/
@Entity
@Cacheable
@org.hibernate.annotations.Cache(usage = CacheConcurrencyStrategy.NONSTRICT_READ_WRITE, include = "non-lazy")
@Table(name = "epersongroup" )
public class Group extends DSpaceObject implements DSpaceObjectLegacySupport
{

View File

@@ -7,6 +7,7 @@
*/
package org.dspace.eperson;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hibernate.proxy.HibernateProxyHelper;
import javax.persistence.*;
@@ -79,4 +80,12 @@ public class Group2GroupCache implements Serializable {
}
return true;
}
/**
 * Hash on the parent and child group IDs, matching the fields compared by
 * equals(). Uses the lang3 HashCodeBuilder already imported by this class
 * instead of the fully-qualified legacy commons-lang (v2) builder, removing
 * the inconsistent dependency on the old artifact.
 */
@Override
public int hashCode() {
    return new HashCodeBuilder()
            .append(parent == null ? "" : parent.getID())
            .append(child == null ? "" : child.getID())
            .toHashCode();
}
}

View File

@@ -8,6 +8,7 @@
package org.dspace.eperson;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.dspace.authorize.AuthorizeConfiguration;
@@ -156,41 +157,92 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
return owningGroup.contains(childGroup);
}
/**
 * Test whether childGroup is a child of parentGroup, using a single lookup
 * against the Group2GroupCache table.
 *
 * <p>NOTE(review): whether this covers transitive (multi-level) nesting
 * depends on the cache table storing the closure — confirm against
 * Group2GroupCacheDAO semantics.
 *
 * @param context The DSpace context
 * @param parentGroup the candidate parent group
 * @param childGroup the candidate child group
 * @return true when a cached parent/child relationship exists
 * @throws SQLException if database error
 */
@Override
public boolean isParentOf(Context context, Group parentGroup, Group childGroup) throws SQLException {
    return group2GroupCacheDAO.findByParentAndChild(context, parentGroup, childGroup) != null;
}
@Override
public boolean isMember(Context context, Group group) throws SQLException {
return isMember(context, group.getName());
return isMember(context, context.getCurrentUser(), group);
}
/**
 * Determine whether the given EPerson is a member of the given group,
 * either directly, via subgroup membership (one DB query), or via the
 * context's special groups. Results are memoized on the Context.
 *
 * @param context current DSpace session; consulted for the membership
 *                cache, the current user, and special groups.
 * @param ePerson the person to test; may be null (anonymous).
 * @param group the group to test against; null yields false.
 * @return true if ePerson is a member of group.
 * @throws SQLException if database error
 */
@Override
public boolean isMember(Context context, EPerson ePerson, Group group)
    throws SQLException
{
    if(group == null) {
        return false;

        // special, everyone is member of group 0 (anonymous)
    } else if (StringUtils.equals(group.getName(), Group.ANONYMOUS)) {
        return true;

    } else {
        // Consult the per-Context memo first to avoid repeated DB lookups.
        Boolean cachedGroupMembership = context.getCachedGroupMembership(group, ePerson);

        if(cachedGroupMembership != null) {
            return cachedGroupMembership.booleanValue();
        } else {
            boolean isMember = false;

            //If we have an ePerson, check we can find membership in the database
            if(ePerson != null) {
                //lookup eperson in normal groups and subgroups with 1 query
                isMember = isEPersonInGroup(context, group, ePerson);
            }

            //If we did not find the group membership in the database, check the special groups.
            //If there are special groups we need to check direct membership or check if the
            //special group is a subgroup of the provided group.
            //Note that special groups should only be checked if the current user == the ePerson.
            //This also works for anonymous users (ePerson == null) if IP authentication used
            if(!isMember && CollectionUtils.isNotEmpty(context.getSpecialGroups()) && isAuthenticatedUser(context, ePerson)) {
                Iterator<Group> it = context.getSpecialGroups().iterator();

                while (it.hasNext() && !isMember) {
                    Group specialGroup = it.next();

                    //Check if the special group matches the given group or if it is a subgroup (with 1 query)
                    if (specialGroup.equals(group) || isParentOf(context, group, specialGroup)) {
                        isMember = true;
                    }
                }
            }

            // Memoize the outcome (positive or negative) on the Context.
            context.cacheGroupMembership(group, ePerson, isMember);
            return isMember;
        }
    }
}
private boolean isAuthenticatedUser(final Context context, final EPerson ePerson) {
return ObjectUtils.equals(context.getCurrentUser(), ePerson);
}
@Override
public boolean isMember(final Context context, final String groupName) throws SQLException {
// special, everyone is member of group 0 (anonymous)
if (StringUtils.equals(groupName, Group.ANONYMOUS))
{
return true;
} else if(context.getCurrentUser() != null) {
EPerson currentUser = context.getCurrentUser();
return isMember(context, findByName(context, groupName));
}
//First check the special groups
List<Group> specialGroups = context.getSpecialGroups();
if(CollectionUtils.isNotEmpty(specialGroups)) {
for (Group specialGroup : specialGroups)
{
//Check if the current special group is the one we are looking for OR retrieve all groups & make a check here.
if(StringUtils.equals(specialGroup.getName(), groupName) || allMemberGroups(context, currentUser).contains(findByName(context, groupName)))
{
return true;
}
}
}
//lookup eperson in normal groups and subgroups
return epersonInGroup(context, groupName, currentUser);
} else {
return false;
}
@Override
public boolean isMember(final Context context, EPerson eperson, final String groupName) throws SQLException {
return isMember(context, eperson, findByName(context, groupName));
}
/**
 * List every group the given EPerson belongs to, as computed by
 * {@link #allMemberGroupsSet(Context, EPerson)}.
 *
 * @param context The DSpace context
 * @param ePerson the EPerson whose memberships are wanted
 * @return the member groups as a List
 * @throws SQLException if database error
 */
@Override
public List<Group> allMemberGroups(Context context, EPerson ePerson) throws SQLException {
    Set<Group> memberships = allMemberGroupsSet(context, ePerson);
    return new ArrayList<>(memberships);
}
@Override
public Set<Group> allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException {
Set<Group> cachedGroupMembership = context.getCachedAllMemberGroupsSet(ePerson);
if(cachedGroupMembership != null) {
return cachedGroupMembership;
}
Set<Group> groups = new HashSet<>();
if (ePerson != null)
@@ -216,7 +268,6 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
// all the users are members of the anonymous group
groups.add(findByName(context, Group.ANONYMOUS));
List<Group2GroupCache> groupCache = group2GroupCacheDAO.findByChildren(context, groups);
// now we have all owning groups, also grab all parents of owning groups
// yes, I know this could have been done as one big query and a union,
@@ -225,7 +276,8 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
groups.add(group2GroupCache.getParent());
}
return new ArrayList<>(groups);
context.cacheAllMemberGroupsSet(ePerson, groups);
return groups;
}
@Override
@@ -462,10 +514,10 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl<Group> implements
protected boolean epersonInGroup(Context context, String groupName, EPerson ePerson)
protected boolean isEPersonInGroup(Context context, Group group, EPerson ePerson)
throws SQLException
{
return groupDAO.findByNameAndMembership(context, groupName, ePerson) != null;
return groupDAO.findByIdAndMembership(context, group.getID(), ePerson) != null;
}

View File

@@ -289,7 +289,7 @@ public class SubscribeCLITool {
Context context = null;
try {
context = new Context();
context = new Context(Context.Mode.READ_ONLY);
processDaily(context, test);
context.complete();
} catch (Exception e) {

View File

@@ -14,7 +14,6 @@ import org.dspace.eperson.Group2GroupCache;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
/**
* Database Access Object interface class for the Group2GroupCache object.
@@ -27,7 +26,9 @@ public interface Group2GroupCacheDAO extends GenericDAO<Group2GroupCache> {
public List<Group2GroupCache> findByParent(Context context, Group group) throws SQLException;
public List<Group2GroupCache> findByChildren(Context context, Set<Group> groups) throws SQLException;
public List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException;
public Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException;
public Group2GroupCache find(Context context, Group parent, Group child) throws SQLException;

View File

@@ -121,11 +121,11 @@ public interface GroupDAO extends DSpaceObjectDAO<Group>, DSpaceObjectLegacySupp
/**
* Find a group by its name and the membership of the given EPerson
* @param context The DSpace context
* @param groupName The name of the group to look for
* @param id The id of the group to look for
* @param ePerson The EPerson which has to be a member
* @return The group with the specified id
* @throws SQLException if database error
*/
Group findByNameAndMembership(Context context, String groupName, EPerson ePerson) throws SQLException;
Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException;
}

View File

@@ -43,6 +43,8 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
// All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup
Criteria criteria = createCriteria(context, EPerson.class);
criteria.add(Restrictions.eq("email", email.toLowerCase()));
criteria.setCacheable(true);
return uniqueResult(criteria);
}
@@ -52,6 +54,8 @@ public class EPersonDAOImpl extends AbstractHibernateDSODAO<EPerson> implements
{
Criteria criteria = createCriteria(context, EPerson.class);
criteria.add(Restrictions.eq("netid", netid));
criteria.setCacheable(true);
return uniqueResult(criteria);
}

View File

@@ -13,6 +13,7 @@ import org.dspace.eperson.Group;
import org.dspace.eperson.Group2GroupCache;
import org.dspace.eperson.dao.Group2GroupCacheDAO;
import org.hibernate.Criteria;
import org.hibernate.Query;
import org.hibernate.criterion.Disjunction;
import org.hibernate.criterion.Restrictions;
@@ -44,7 +45,7 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
}
@Override
public List<Group2GroupCache> findByChildren(Context context, Set<Group> groups) throws SQLException {
public List<Group2GroupCache> findByChildren(Context context, Iterable<Group> groups) throws SQLException {
Criteria criteria = createCriteria(context, Group2GroupCache.class);
Disjunction orDisjunction = Restrictions.or();
@@ -59,6 +60,19 @@ public class Group2GroupCacheDAOImpl extends AbstractHibernateDAO<Group2GroupCac
return list(criteria);
}
@Override
public Group2GroupCache findByParentAndChild(Context context, Group parent, Group child) throws SQLException {
Query query = createQuery(context,
"FROM Group2GroupCache g WHERE g.parent = :parentGroup AND g.child = :childGroup");
query.setParameter("parentGroup", parent);
query.setParameter("childGroup", child);
query.setCacheable(true);
return singleResult(query);
}
@Override
public Group2GroupCache find(Context context, Group parent, Group child) throws SQLException {
Criteria criteria = createCriteria(context, Group2GroupCache.class);

View File

@@ -101,25 +101,25 @@ public class GroupDAOImpl extends AbstractHibernateDSODAO<Group> implements Grou
}
@Override
public Group findByNameAndMembership(Context context, String groupName, EPerson ePerson) throws SQLException {
if(groupName == null || ePerson == null) {
public Group findByIdAndMembership(Context context, UUID id, EPerson ePerson) throws SQLException {
if(id == null || ePerson == null) {
return null;
} else {
Query query = createQuery(context,
"SELECT DISTINCT g FROM Group g " +
"LEFT JOIN g.epeople p " +
"WHERE g.name = :name AND " +
"WHERE g.id = :id AND " +
"(p.id = :eperson_id OR " +
"EXISTS ( " +
"SELECT 1 FROM Group2GroupCache gc " +
"JOIN gc.parent p " +
"JOIN gc.child c " +
"JOIN c.epeople cp " +
"WHERE p.id = g.id AND cp.id = :eperson_id " +
"JOIN gc.parent parent " +
"JOIN gc.child child " +
"JOIN child.epeople cp " +
"WHERE parent.id = g.id AND cp.id = :eperson_id " +
") " +
")");
query.setParameter("name", groupName);
query.setParameter("id", id);
query.setParameter("eperson_id", ePerson.getID());
query.setCacheable(true);

View File

@@ -7,9 +7,6 @@
*/
package org.dspace.eperson.service;
import java.sql.SQLException;
import java.util.List;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.MetadataField;
import org.dspace.content.service.DSpaceObjectLegacySupportService;
@@ -18,6 +15,10 @@ import org.dspace.core.Context;
import org.dspace.eperson.EPerson;
import org.dspace.eperson.Group;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
/**
* Service interface class for the Group object.
* The implementation of this class is responsible for all business logic calls for the Group object and is autowired by spring
@@ -116,6 +117,15 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
*/
public boolean isMember(Group owningGroup, Group childGroup);
/**
* Check to see if parentGroup is a direct or in-direct parent of a childGroup.
*
* @param parentGroup parent group
* @param childGroup child group
* @return true or false
*/
public boolean isParentOf(Context context, Group parentGroup, Group childGroup) throws SQLException;
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
@@ -133,7 +143,8 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
* thus a static method
* thus a static method. This method uses context.getCurrentUser() as
* eperson whos membership should be checked.
*
* @param context
* context
@@ -144,6 +155,34 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
*/
public boolean isMember(Context context, String groupName) throws SQLException;
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
* thus a static method. The eperson whos membership should be checked must
* be defined as method attribute.
*
* @param context
* context
* @param groupName
* the name of the group to check
* @return true or false
* @throws SQLException if database error
*/
public boolean isMember(Context context, EPerson epersonToCheck, String groupName) throws SQLException;
/**
* fast check to see if an eperson is a member called with eperson id, does
* database lookup without instantiating all of the epeople objects and is
* thus a static method
*
* @param context DSpace context object.
* @param eperson EPerson whos membership should be checked.
* @param group The group to check against.
* @return true or false
* @throws SQLException if database error
*/
public boolean isMember(Context context, EPerson eperson, Group group) throws SQLException;
/**
* Get all of the groups that an eperson is a member of.
*
@@ -154,6 +193,8 @@ public interface GroupService extends DSpaceObjectService<Group>, DSpaceObjectLe
*/
public List<Group> allMemberGroups(Context context, EPerson ePerson) throws SQLException;
Set<Group> allMemberGroupsSet(Context context, EPerson ePerson) throws SQLException;
/**
* Get all of the epeople who are a member of the
* specified group, or a member of a sub-group of the

View File

@@ -231,6 +231,10 @@ public class HandleServiceImpl implements HandleService
// can verify during a restore whether the same *type* of resource
// is reusing this handle!
handle.setDSpaceObject(null);
//Also remove the handle from the DSO list to keep a consistent model
dso.getHandles().remove(handle);
handleDAO.save(context, handle);
if(log.isDebugEnabled())
@@ -241,7 +245,7 @@ public class HandleServiceImpl implements HandleService
}
else
{
log.warn("Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso.getID());
log.trace("Cannot find Handle entry to unbind for object " + Constants.typeText[dso.getType()] + " id=" + dso.getID() + ". Handle could have been unbinded before.");
}
}

View File

@@ -7,10 +7,6 @@
*/
package org.dspace.handle;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.List;
import org.apache.log4j.Logger;
import org.dspace.content.MetadataValue;
import org.dspace.content.factory.ContentServiceFactory;
@@ -19,6 +15,13 @@ import org.dspace.core.Context;
import org.dspace.discovery.IndexClient;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.Iterator;
/**
* A script to update the handle values in the database. This is typically used
@@ -32,6 +35,7 @@ public class UpdateHandlePrefix
{
private static final Logger log = Logger.getLogger(UpdateHandlePrefix.class);
private static final ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
/**
* When invoked as a command-line tool, updates handle prefix
@@ -94,12 +98,19 @@ public class UpdateHandlePrefix
System.out.print("Updating metadatavalues table... ");
MetadataValueService metadataValueService = ContentServiceFactory.getInstance().getMetadataValueService();
List<MetadataValue> metadataValues = metadataValueService.findByValueLike(context, "http://hdl.handle.net/");
int updMeta = metadataValues.size();
for (MetadataValue metadataValue : metadataValues) {
metadataValue.setValue(metadataValue.getValue().replace("http://hdl.handle.net/" + oldH, "http://hdl.handle.net/" + newH));
String handlePrefix = configurationService.getProperty("handle.canonical.prefix");
Iterator<MetadataValue> metadataValues = metadataValueService.findByValueLike(context, handlePrefix + oldH);
int updMeta = 0;
while(metadataValues.hasNext()) {
MetadataValue metadataValue = metadataValues.next();
metadataValue.setValue(metadataValue.getValue().replace(handlePrefix + oldH, handlePrefix + newH));
metadataValueService.update(context, metadataValue, true);
context.uncacheEntity(metadataValue);
updMeta++;
}
System.out.println(
updMeta + " metadata value" + ((updMeta > 1) ? "s" : "") + " updated"
);

View File

@@ -7,12 +7,9 @@
*/
package org.dspace.handle.dao.impl;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import org.dspace.content.DSpaceObject;
import org.dspace.core.Context;
import org.dspace.core.AbstractHibernateDAO;
import org.dspace.core.Context;
import org.dspace.handle.Handle;
import org.dspace.handle.dao.HandleDAO;
import org.hibernate.Criteria;
@@ -23,6 +20,9 @@ import org.hibernate.jdbc.ReturningWork;
import org.hibernate.service.jdbc.dialect.internal.StandardDialectResolver;
import org.hibernate.service.jdbc.dialect.spi.DialectResolver;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
@@ -93,9 +93,10 @@ public class HandleDAOImpl extends AbstractHibernateDAO<Handle> implements Handl
@Override
public int updateHandlesWithNewPrefix(Context context, String newPrefix, String oldPrefix) throws SQLException
{
String hql = "UPDATE Handle set handle = concat(:newPrefix,'/',id) WHERE handle like concat(:oldPrefix,'%')";
String hql = "UPDATE Handle set handle = concat(:newPrefix, '/', substring(handle, :oldPrefixLength + 2)) WHERE handle like concat(:oldPrefix,'%')";
Query query = createQuery(context, hql);
query.setString("newPrefix", newPrefix);
query.setInteger("oldPrefixLength", oldPrefix.length());
query.setString("oldPrefix", oldPrefix);
return query.executeUpdate();
}

View File

@@ -215,8 +215,8 @@ public class OAIHarvester {
*/
public void runHarvest() throws SQLException, IOException, AuthorizeException
{
boolean originalMode = ourContext.isBatchModeEnabled();
ourContext.enableBatchMode(true);
Context.Mode originalMode = ourContext.getCurrentMode();
ourContext.setMode(Context.Mode.BATCH_EDIT);
// figure out the relevant parameters
String oaiSource = harvestRow.getOaiSource();
@@ -432,7 +432,7 @@ public class OAIHarvester {
log.info("Harvest from " + oaiSource + " successful. The process took " + timeTaken + " milliseconds. Harvested " + currentRecord + " items.");
harvestedCollection.update(ourContext, harvestRow);
ourContext.enableBatchMode(originalMode);
ourContext.setMode(originalMode);
}
private void intermediateCommit() throws SQLException {
@@ -625,6 +625,11 @@ public class OAIHarvester {
log.info(String.format("Item %s (%s) has been ingested (item %d of %d). The whole process took: %d ms.",
item.getHandle(), item.getID(), currentRecord, totalListSize, timeTaken));
//Clear the context cache
ourContext.uncacheEntity(wi);
ourContext.uncacheEntity(hi);
ourContext.uncacheEntity(item);
// Stop ignoring authorization
ourContext.restoreAuthSystemState();
}

View File

@@ -8,28 +8,12 @@
package org.dspace.identifier.doi;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.SQLException;
import java.util.*;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.log4j.Logger;
import org.dspace.content.DSpaceObject;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.ItemService;
import org.dspace.core.ConfigurationManager;
import org.dspace.core.Constants;
import org.dspace.core.Context;
import org.dspace.core.Email;
import org.dspace.core.I18nUtil;
import org.dspace.core.*;
import org.dspace.handle.factory.HandleServiceFactory;
import org.dspace.handle.service.HandleService;
import org.dspace.identifier.DOI;
@@ -39,6 +23,11 @@ import org.dspace.identifier.factory.IdentifierServiceFactory;
import org.dspace.identifier.service.DOIService;
import org.dspace.utils.DSpace;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.SQLException;
import java.util.*;
/**
*
@@ -203,6 +192,7 @@ public class DOIOrganiser {
for (DOI doi : dois) {
organiser.reserve(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -223,6 +213,7 @@ public class DOIOrganiser {
for (DOI doi : dois)
{
organiser.register(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -247,6 +238,7 @@ public class DOIOrganiser {
for (DOI doi : dois)
{
organiser.update(doi);
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());
@@ -270,6 +262,7 @@ public class DOIOrganiser {
DOI doi = iterator.next();
iterator.remove();
organiser.delete(doi.getDoi());
context.uncacheEntity(doi);
}
} catch (SQLException ex) {
System.err.println("Error in database connection:" + ex.getMessage());

View File

@@ -320,7 +320,7 @@ public class RDFConsumer implements Consumer
// create a new context, to be sure to work as anonymous user
// we don't want to store private data in a triplestore with public
// SPARQL endpoint.
ctx = new Context(Context.READ_ONLY);
ctx = new Context(Context.Mode.READ_ONLY);
if (toDelete == null)
{
log.debug("Deletion queue does not exists, creating empty queue.");

View File

@@ -9,29 +9,11 @@
package org.dspace.rdf;
import com.hp.hpl.jena.rdf.model.Model;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArraySet;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.cli.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.dspace.authorize.AuthorizeException;
import org.dspace.content.Collection;
import org.dspace.content.Community;
import org.dspace.content.DSpaceObject;
import org.dspace.content.Item;
import org.dspace.content.Site;
import org.dspace.content.*;
import org.dspace.content.factory.ContentServiceFactory;
import org.dspace.content.service.CommunityService;
import org.dspace.content.service.ItemService;
@@ -44,6 +26,14 @@ import org.dspace.rdf.storage.RDFStorage;
import org.dspace.services.ConfigurationService;
import org.dspace.services.factory.DSpaceServicesFactory;
import java.io.PrintWriter;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* This class manages the handling of RDF data in DSpace. It generates
* identifiers, it loads data, it manages the conversion of DSpace Objects into
@@ -84,7 +74,7 @@ public class RDFizer {
this.dryrun = false;
this.lang = "TURTLE";
this.processed = new CopyOnWriteArraySet<UUID>();
this.context = new Context(Context.READ_ONLY);
this.context = new Context(Context.Mode.READ_ONLY);
this.configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
this.contentServiceFactory = ContentServiceFactory.getInstance();
@@ -443,8 +433,9 @@ public class RDFizer {
// }
callback.callback(dso);
report("Processed " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID()
report("Processed " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " " + dso.getID()
+ " (handle " + dso.getHandle() + ").");
context.uncacheEntity(dso);
}
protected boolean isProcessed(DSpaceObject dso)
@@ -796,7 +787,7 @@ public class RDFizer {
// data into a triple store that provides a public sparql endpoint.
// all exported rdf data can be read by anonymous users.
// We won't change the database => read_only context will assure this.
Context context = new Context(Context.READ_ONLY);
Context context = new Context(Context.Mode.READ_ONLY);
RDFizer myself = null;
myself = new RDFizer();

View File

@@ -25,13 +25,17 @@ import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
import org.apache.solr.client.solrj.request.CoreAdminRequest;
import org.apache.solr.client.solrj.request.LukeRequest;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.LukeResponse;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.RangeFacet;
import org.apache.solr.client.solrj.response.SolrPingResponse;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.luke.FieldFlag;
import org.apache.solr.common.params.*;
import org.dspace.content.*;
import org.dspace.content.Collection;
@@ -72,7 +76,9 @@ import java.util.*;
public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBean
{
private static final Logger log = Logger.getLogger(SolrLoggerServiceImpl.class);
private static final String MULTIPLE_VALUES_SPLITTER = "|";
protected HttpSolrServer solr;
public static final String DATE_FORMAT_8601 = "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'";
@@ -84,7 +90,8 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
protected boolean useProxies;
private static List<String> statisticYearCores = new ArrayList<String>();
private static boolean statisticYearCoresInit = false;
@Autowired(required = true)
protected BitstreamService bitstreamService;
@Autowired(required = true)
@@ -126,28 +133,6 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
try
{
server = new HttpSolrServer(configurationService.getProperty("solr-statistics.server"));
//Attempt to retrieve all the statistic year cores
File solrDir = new File(configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator);
File[] solrCoreFiles = solrDir.listFiles(new FileFilter() {
@Override
public boolean accept(File file) {
//Core name example: statistics-2008
return file.getName().matches("statistics-\\d\\d\\d\\d");
}
});
//Base url should like : http://localhost:{port.number}/solr
String baseSolrUrl = server.getBaseURL().replace("statistics", "");
for (File solrCoreFile : solrCoreFiles) {
log.info("Loading core with name: " + solrCoreFile.getName());
createCore(server, solrCoreFile.getName());
//Add it to our cores list so we can query it !
statisticYearCores.add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName());
}
//Also add the core containing the current year !
statisticYearCores.add(server.getBaseURL().replace("http://", "").replace("https://", ""));
} catch (Exception e) {
log.error(e.getMessage(), e);
}
@@ -201,6 +186,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
{
return;
}
initSolrYearCores();
try
@@ -240,6 +226,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
if (solr == null || locationService == null) {
return;
}
initSolrYearCores();
try {
SolrInputDocument doc1 = getCommonSolrDoc(dspaceObject, ip, userAgent, xforwardedfor,
@@ -465,6 +452,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
{
SolrInputDocument solrDoc = getCommonSolrDoc(resultObject, request, currentUser);
if (solrDoc == null) return;
initSolrYearCores();
for (String query : queries) {
solrDoc.addField("query", query);
@@ -511,6 +499,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
@Override
public void postWorkflow(UsageWorkflowEvent usageWorkflowEvent) throws SQLException {
initSolrYearCores();
try {
SolrInputDocument solrDoc = getCommonSolrDoc(usageWorkflowEvent.getObject(), null, null);
@@ -1236,8 +1225,12 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
yearQueryParams.put(CommonParams.FQ, filterQuery.toString());
yearQueryParams.put(CommonParams.WT, "csv");
//Tell SOLR how to escape and separate the values of multi-valued fields
yearQueryParams.put("csv.escape", "\\");
yearQueryParams.put("csv.mv.separator", MULTIPLE_VALUES_SPLITTER);
//Start by creating a new core
String coreName = "statistics-" + dcStart.getYear();
String coreName = "statistics-" + dcStart.getYearUTC();
HttpSolrServer statisticsYearServer = createCore(solr, coreName);
System.out.println("Moving: " + totalRecords + " into core " + coreName);
@@ -1252,7 +1245,7 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
HttpResponse response = new DefaultHttpClient().execute(get);
InputStream csvInputstream = response.getEntity().getContent();
//Write the csv ouput to a file !
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYear() + "." + i + ".csv");
File csvFile = new File(tempDirectory.getPath() + File.separatorChar + "temp." + dcStart.getYearUTC() + "." + i + ".csv");
FileUtils.copyInputStreamToFile(csvInputstream, csvFile);
filesToUpload.add(csvFile);
@@ -1260,16 +1253,26 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
yearQueryParams.put(CommonParams.START, String.valueOf((i + 10000)));
}
Set<String> multivaluedFields = getMultivaluedFieldNames();
for (File tempCsv : filesToUpload) {
//Upload the data in the csv files to our new solr core
ContentStreamUpdateRequest contentStreamUpdateRequest = new ContentStreamUpdateRequest("/update/csv");
contentStreamUpdateRequest.setParam("stream.contentType", "text/plain;charset=utf-8");
contentStreamUpdateRequest.setParam("escape", "\\");
contentStreamUpdateRequest.setParam("skip", "_version_");
contentStreamUpdateRequest.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
contentStreamUpdateRequest.addFile(tempCsv, "text/plain;charset=utf-8");
//Add parsing directives for the multivalued fields so that they are stored as separate values instead of one value
for (String multivaluedField : multivaluedFields) {
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".split", Boolean.TRUE.toString());
contentStreamUpdateRequest.setParam("f." + multivaluedField + ".separator", MULTIPLE_VALUES_SPLITTER);
}
statisticsYearServer.request(contentStreamUpdateRequest);
}
statisticsYearServer.commit(true, true);
@@ -1286,14 +1289,61 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
protected HttpSolrServer createCore(HttpSolrServer solr, String coreName) throws IOException, SolrServerException {
String solrDir = configurationService.getProperty("dspace.dir") + File.separator + "solr" +File.separator;
String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
//DS-3458: Test to see if a solr core already exists. If it exists, return that server. Otherwise create a new one.
HttpSolrServer returnServer = new HttpSolrServer(baseSolrUrl + "/" + coreName);
try {
SolrPingResponse ping = returnServer.ping();
log.debug(String.format("Ping of Solr Core [%s] Returned with Status [%d]", coreName, ping.getStatus()));
return returnServer;
} catch(Exception e) {
log.debug(String.format("Ping of Solr Core [%s] Failed with [%s]. New Core Will be Created", coreName, e.getClass().getName()));
}
//Unfortunately, this class is documented as "experimental and subject to change" on the Lucene website.
//http://lucene.apache.org/solr/4_4_0/solr-solrj/org/apache/solr/client/solrj/request/CoreAdminRequest.html
CoreAdminRequest.Create create = new CoreAdminRequest.Create();
create.setCoreName(coreName);
//The config files for a statistics shard reside wihtin the statistics repository
create.setInstanceDir("statistics");
create.setDataDir(solrDir + coreName + File.separator + "data");
//It is unclear why a separate solr server using the baseSolrUrl is required.
//Based on testing while working on DS-3457, this appears to be necessary.
HttpSolrServer solrServer = new HttpSolrServer(baseSolrUrl);
//DS-3457: The invocation of this method will cause tomcat to hang if this method is invoked before the solr webapp has fully initialized.
//Also, any attempt to ping a repository before solr is fully initialized will also cause tomcat to hang.
create.process(solrServer);
log.info("Created core with name: " + coreName);
return new HttpSolrServer(baseSolrUrl + "/" + coreName);
return returnServer;
}
/**
* Retrieves a list of all the multi valued fields in the solr core
* @return all fields tagged as multivalued
* @throws SolrServerException When getting the schema information from the SOLR core fails
* @throws IOException When connection to the SOLR server fails
*/
public Set<String> getMultivaluedFieldNames() throws SolrServerException, IOException {
Set<String> multivaluedFields = new HashSet<String>();
LukeRequest lukeRequest = new LukeRequest();
lukeRequest.setShowSchema(true);
LukeResponse process = lukeRequest.process(solr);
Map<String, LukeResponse.FieldInfo> fields = process.getFieldInfo();
for(String fieldName : fields.keySet())
{
LukeResponse.FieldInfo fieldInfo = fields.get(fieldName);
EnumSet<FieldFlag> flags = fieldInfo.getFlags();
for(FieldFlag fieldFlag : flags)
{
if(fieldFlag.getAbbreviation() == FieldFlag.MULTI_VALUED.getAbbreviation())
{
multivaluedFields.add(fieldName);
}
}
}
return multivaluedFields;
}
@@ -1516,10 +1566,49 @@ public class SolrLoggerServiceImpl implements SolrLoggerService, InitializingBea
protected void addAdditionalSolrYearCores(SolrQuery solrQuery){
//Only add if needed
initSolrYearCores();
if(0 < statisticYearCores.size()){
//The shards are a comma separated list of the urls to the cores
solrQuery.add(ShardParams.SHARDS, StringUtils.join(statisticYearCores.iterator(), ","));
}
}
/*
* The statistics shards should not be initialized until all tomcat webapps are fully initialized.
* DS-3457 uncovered an issue in DSpace 6x in which this code triggered tomcat to hang when statistics shards are present.
* This code is synchonized in the event that 2 threads trigger the initialization at the same time.
*/
protected synchronized void initSolrYearCores() {
if (statisticYearCoresInit) {
return;
}
try
{
//Attempt to retrieve all the statistic year cores
File solrDir = new File(configurationService.getProperty("dspace.dir") + File.separator + "solr" + File.separator);
File[] solrCoreFiles = solrDir.listFiles(new FileFilter() {
@Override
public boolean accept(File file) {
//Core name example: statistics-2008
return file.getName().matches("statistics-\\d\\d\\d\\d");
}
});
//Base url should like : http://localhost:{port.number}/solr
String baseSolrUrl = solr.getBaseURL().replace("statistics", "");
for (File solrCoreFile : solrCoreFiles) {
log.info("Loading core with name: " + solrCoreFile.getName());
createCore(solr, solrCoreFile.getName());
//Add it to our cores list so we can query it !
statisticYearCores.add(baseSolrUrl.replace("http://", "").replace("https://", "") + solrCoreFile.getName());
}
//Also add the core containing the current year !
statisticYearCores.add(solr.getBaseURL().replace("http://", "").replace("https://", ""));
} catch (Exception e) {
log.error(e.getMessage(), e);
}
statisticYearCoresInit = true;
}
}

View File

@@ -12,6 +12,7 @@ import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.util.ClientUtils;
import org.dspace.content.DSpaceObject;
import org.dspace.content.DSpaceObjectLegacySupport;
import org.dspace.core.Context;
import org.dspace.services.factory.DSpaceServicesFactory;
import org.dspace.statistics.Dataset;
@@ -201,8 +202,12 @@ public class StatisticsDataSearches extends StatisticsData {
protected String getQuery() {
String query;
if(currentDso != null){
query = "scopeType: " + currentDso.getType() + " AND scopeId: " + currentDso.getID();
query = "scopeType: " + currentDso.getType() + " AND ";
if(currentDso instanceof DSpaceObjectLegacySupport){
query += " (scopeId:" + currentDso.getID() + " OR scopeId:" + ((DSpaceObjectLegacySupport) currentDso).getLegacyId() + ")";
}else{
query += "scopeId:" + currentDso.getID();
}
}else{
query = "*:*";
}

View File

@@ -26,6 +26,7 @@ import org.dspace.statistics.factory.StatisticsServiceFactory;
import org.dspace.statistics.service.SolrLoggerService;
import org.dspace.statistics.util.LocationUtils;
import org.dspace.core.Context;
import org.dspace.core.I18nUtil;
import org.dspace.core.Constants;
import org.dspace.core.ConfigurationManager;
import org.dspace.app.util.Util;
@@ -487,6 +488,9 @@ public class StatisticsDataVisits extends StatisticsData
//TODO: CHANGE & THROW AWAY THIS ENTIRE METHOD
//Check if int
String dsoId;
//DS 3602: Until all legacy stats records have been upgraded to using UUID,
//duplicate reports may be presented for each DSO. A note will be appended when reporting legacy counts.
String legacyNote = "";
int dsoLength = query.getDsoLength();
try {
dsoId = UUID.fromString(value).toString();
@@ -494,6 +498,7 @@ public class StatisticsDataVisits extends StatisticsData
try {
//Legacy identifier support
dsoId = String.valueOf(Integer.parseInt(value));
legacyNote = I18nUtil.getMessage("org.dspace.statistics.content.StatisticsDataVisits.legacy", context);
} catch (NumberFormatException e1) {
dsoId = null;
}
@@ -511,7 +516,7 @@ public class StatisticsDataVisits extends StatisticsData
{
break;
}
return value;
return bit.getName() + legacyNote;
case Constants.ITEM:
Item item = itemService.findByIdOrLegacyId(context, dsoId);
if(item == null)
@@ -532,7 +537,7 @@ public class StatisticsDataVisits extends StatisticsData
}
}
return name;
return name + legacyNote;
case Constants.COLLECTION:
Collection coll = collectionService.findByIdOrLegacyId(context, dsoId);
@@ -549,7 +554,7 @@ public class StatisticsDataVisits extends StatisticsData
name = name.substring(0, firstSpace) + " ...";
}
}
return name;
return name + legacyNote;
case Constants.COMMUNITY:
Community comm = communityService.findByIdOrLegacyId(context, dsoId);
@@ -566,7 +571,7 @@ public class StatisticsDataVisits extends StatisticsData
name = name.substring(0, firstSpace) + " ...";
}
}
return name;
return name + legacyNote;
}
}
}
@@ -805,8 +810,11 @@ public class StatisticsDataVisits extends StatisticsData
if(dso != null)
{
query += (query.equals("") ? "" : " AND ");
//DS-3602: For clarity, adding "id:" to the right hand side of the search
//In the solr schema, "id" has been declared as the defaultSearchField so the field name is optional
if(dso instanceof DSpaceObjectLegacySupport){
query += " (id:" + dso.getID() + " OR " + ((DSpaceObjectLegacySupport) dso).getLegacyId() + ")";
query += " (id:" + dso.getID() + " OR id:" + ((DSpaceObjectLegacySupport) dso).getLegacyId() + ")";
}else{
query += "id:" + dso.getID();
}
@@ -828,7 +836,13 @@ public class StatisticsDataVisits extends StatisticsData
owningStr = "owningComm";
break;
}
owningStr += ":" + currentDso.getID();
if(currentDso instanceof DSpaceObjectLegacySupport){
owningStr = "(" + owningStr + ":" + currentDso.getID() + " OR "
+ owningStr + ":" + ((DSpaceObjectLegacySupport) currentDso).getLegacyId() + ")";
}else{
owningStr += ":" + currentDso.getID();
}
query += owningStr;
}

Some files were not shown because too many files have changed in this diff Show More