From 51d8874a8fdf9488615aab7d80e7597f5b0d2341 Mon Sep 17 00:00:00 2001 From: jensroets Date: Thu, 8 Sep 2022 16:50:02 +0200 Subject: [PATCH 001/125] 94299 Multiple Bitstream deletion endpoint --- .../app/rest/RestResourceController.java | 33 + .../repository/BitstreamRestRepository.java | 44 + .../rest/repository/DSpaceRestRepository.java | 18 + .../app/rest/BitstreamRestRepositoryIT.java | 955 ++++++++++++++++++ 4 files changed, 1050 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java index b82b483075..24468660f0 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_HEX32; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG; @@ -55,6 +56,8 @@ import org.dspace.app.rest.repository.LinkRestRepository; import org.dspace.app.rest.utils.RestRepositoryUtils; import org.dspace.app.rest.utils.Utils; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; +import org.dspace.core.Context; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -1050,6 +1053,13 @@ public class RestResourceController implements InitializingBean { return deleteInternal(apiCategory, model, uuid); } + @RequestMapping(method = RequestMethod.DELETE, consumes = {"text/uri-list"}) + public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, + @PathVariable String model) + throws HttpRequestMethodNotSupportedException { + return deleteUriListInternal(request, apiCategory, model); + } + /** * Internal method to delete resource. 
* @@ -1067,6 +1077,29 @@ public class RestResourceController implements InitializingBean { return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } + public ResponseEntity> deleteUriListInternal( + HttpServletRequest request, + String apiCategory, + String model) + throws HttpRequestMethodNotSupportedException { + checkModelPluralForm(apiCategory, model); + DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); + Context context = obtainContext(request); + List dsoStringList = utils.getStringListFromRequest(request); + List dsoList = utils.constructDSpaceObjectList(context, dsoStringList); + if (dsoStringList.size() != dsoList.size()) { + throw new ResourceNotFoundException("One or more bitstreams could not be found."); + } + try { + repository.delete(dsoList); + } catch (ClassCastException e) { + log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory + + " and model: " + model, e); + return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR); + } + return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); + } + /** * Execute a PUT request for an entity with id of type UUID; * diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index ae3cf91d4c..f599d993be 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -10,6 +10,8 @@ package org.dspace.app.rest.repository; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; +import java.util.ArrayList; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -147,6 +149,48 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository dsoList) + throws SQLException, AuthorizeException { + // check if list is empty + if (dsoList.isEmpty()) { + throw new ResourceNotFoundException("No bitstreams given."); + } + // check if every DSO is a Bitstream + if (dsoList.stream().anyMatch(dso -> !(dso instanceof Bitstream))) { + throw new UnprocessableEntityException("Not all given items are bitstreams."); + } + // check that they're all part of the same Item + List items = new ArrayList<>(); + for (DSpaceObject dso : dsoList) { + Bitstream bit = bs.find(context, dso.getID()); + DSpaceObject bitstreamParent = bs.getParentObject(context, bit); + if (bit == null) { + throw new ResourceNotFoundException("The bitstream with uuid " + dso.getID() + " could not be found"); + } + // we have to check if the bitstream has already been deleted + if (bit.isDeleted()) { + throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID() + + " was already deleted"); + } else { + items.add(bitstreamParent); + } + } + if (items.stream().distinct().count() > 1) { + throw new UnprocessableEntityException("Not all given items are part of the same Item."); + } + // delete all Bitstreams + Iterator iterator = dsoList.iterator(); + while (iterator.hasNext()) { + Bitstream bit = (Bitstream) iterator.next(); + try { + bs.delete(context, bit); + } catch (SQLException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + } + /** * Find the bitstream for the provided handle and sequence or filename. 
* When a bitstream can be found with the sequence ID it will be returned if the user has "METADATA_READ" access. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5..219b7c4123 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -26,6 +26,7 @@ import org.dspace.app.rest.model.ItemRest; import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.patch.Patch; import org.dspace.authorize.AuthorizeException; +import org.dspace.content.DSpaceObject; import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Context; import org.springframework.beans.factory.BeanNameAware; @@ -256,6 +257,23 @@ public abstract class DSpaceRestRepository dsoList) { + Context context = obtainContext(); + try { + getThisRepository().deleteList(context, dsoList); + context.commit(); + } catch (AuthorizeException e) { + throw new RESTAuthorizationException(e); + } catch (SQLException ex) { + throw new RuntimeException(ex.getMessage(), ex); + } + } + + protected void deleteList(Context context, List list) + throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException { + throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); + } + @Override /** * This method cannot be implemented we required all the find method to be paginated diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index f9c1e469fc..391d9e4193 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -13,6 +13,7 @@ import static org.dspace.core.Constants.WRITE; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -1201,6 +1202,960 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest .andExpect(status().isNotFound()); } + @Test + public void deleteListOneBitstream() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. 
One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + String bitstreamContent = "ThisIsSomeDummyText"; + //Add a bitstream to an item + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream") + .withDescription("Description") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListOneOfMultipleBitstreams() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete bitstream1 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for bitstream1 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isNotFound()); + + // check that bitstream2 still exists + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())); + + // check that bitstream3 still exists + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())) + ; + } + + @Test + public void deleteListAllBitstreams() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for bitstream1 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for bitstream2 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for bitstream3 + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListForbidden() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(eperson.getEmail(), password); + + // Delete using an unauthorized user + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isForbidden()); + + // Verify the bitstreams are still here + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListUnauthorized() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + // Delete as anonymous + getClient().perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isUnauthorized()); + + // Verify the bitstreams are still here + getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListEmpty() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with empty list throws 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListNotBitstream() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with list containing non-Bitstream throws 422 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() + + " \n http://localhost:8080/server/api/core/items/" + publicItem1.getID())) + .andExpect(status().is(422)); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListDifferentItems() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. Two public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 1 bitstream to each item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. 
+ createBitstream(context, publicItem2, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete with list containing Bitstreams from different items throws 422 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().is(422)); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + } + + @Test + public void deleteListLogo() throws Exception { + // We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + // ** GIVEN ** + // 1. A community with a logo + parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community") + .build(); + + // 2. A collection with a logo + Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection") + .withLogo("logo_collection").build(); + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // trying to DELETE parentCommunity logo and collection logo should work + // we have to delete them separately otherwise it will throw 422 as they belong to different items + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + parentCommunity.getLogo().getID())) + .andExpect(status().is(204)); + + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + col.getLogo().getID())) + .andExpect(status().is(204)); + + // Verify 404 after delete for parentCommunity logo + getClient(token).perform(get("/api/core/bitstreams/" + parentCommunity.getLogo().getID())) + .andExpect(status().isNotFound()); + + // Verify 404 after delete for collection logo + getClient(token).perform(get("/api/core/bitstreams/" + col.getLogo().getID())) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListMissing() throws Exception { + String token = getAuthToken(admin.getEmail(), password); + + // Delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify 404 after failed delete + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + } + + @Test + public void deleteListOneMissing() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 3 bitstreams to the item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent3 = "ThisIsSomeDummyText3"; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { + bitstream3 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams and a missing bitstream returns 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().isOk()); + } + + @Test + public void deleteListOneMissingDifferentItems() throws Exception { + + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. 
Two public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + Item publicItem2 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + // Add 1 bitstream to each item + String bitstreamContent1 = "ThisIsSomeDummyText1"; + Bitstream bitstream1 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder. + createBitstream(context, publicItem1, is) + .withName("Bitstream1") + .withDescription("Description1") + .withMimeType("text/plain") + .build(); + } + + String bitstreamContent2 = "ThisIsSomeDummyText2"; + Bitstream bitstream2 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { + bitstream2 = BitstreamBuilder. + createBitstream(context, publicItem2, is) + .withName("Bitstream2") + .withDescription("Description2") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete all bitstreams and a missing bitstream returns 404 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) + .andExpect(status().isNotFound()); + + // Verify the bitstreams are still here + getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().isOk()); + + getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) + .andExpect(status().isOk()); + + } + + @Test + public void deleteListDeleted() throws Exception { + //We turn off the authorization system in order to create the structure as defined below + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + + //2. One public items that is readable by Anonymous + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test") + .withIssueDate("2010-10-17") + .withAuthor("Smith, Donald") + .withSubject("ExtraEntry") + .build(); + + String bitstreamContent = "ThisIsSomeDummyText"; + //Add a bitstream to an item + Bitstream bitstream = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder. 
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream")
+ .withDescription("Description")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ context.restoreAuthSystemState();
+
+ String token = getAuthToken(admin.getEmail(), password);
+
+ // Delete
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().is(204));
+
+ // Verify 422 when trying to delete an already deleted bitstream
+ getClient(token).perform(delete("/api/core/bitstreams")
+ .contentType(TEXT_URI_LIST)
+ .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID()))
+ .andExpect(status().is(422));
+ }
+
+ @Test
+ public void deleteListOneDeleted() throws Exception {
+ //We turn off the authorization system in order to create the structure as defined below
+ context.turnOffAuthorisationSystem();
+
+ //** GIVEN **
+ //1. A community-collection structure with one parent community with sub-community and one collection.
+ parentCommunity = CommunityBuilder.createCommunity(context)
+ .withName("Parent Community")
+ .build();
+ Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity)
+ .withName("Sub Community")
+ .build();
+ Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build();
+
+ //2. One public item that is readable by Anonymous
+ Item publicItem1 = ItemBuilder.createItem(context, col1)
+ .withTitle("Test")
+ .withIssueDate("2010-10-17")
+ .withAuthor("Smith, Donald")
+ .withSubject("ExtraEntry")
+ .build();
+
+ // Add 3 bitstreams to the item
+ String bitstreamContent1 = "ThisIsSomeDummyText1";
+ Bitstream bitstream1 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) {
+ bitstream1 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream1")
+ .withDescription("Description1")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent2 = "ThisIsSomeDummyText2";
+ Bitstream bitstream2 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) {
+ bitstream2 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is)
+ .withName("Bitstream2")
+ .withDescription("Description2")
+ .withMimeType("text/plain")
+ .build();
+ }
+
+ String bitstreamContent3 = "ThisIsSomeDummyText3";
+ Bitstream bitstream3 = null;
+ try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) {
+ bitstream3 = BitstreamBuilder.
+ createBitstream(context, publicItem1, is) + .withName("Bitstream3") + .withDescription("Description3") + .withMimeType("text/plain") + .build(); + } + + context.restoreAuthSystemState(); + + String token = getAuthToken(admin.getEmail(), password); + + // Delete bitstream1 + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) + .andExpect(status().is(204)); + + // Verify 404 when trying to delete a non-existing, already deleted, bitstream + getClient(token).perform(delete("/api/core/bitstreams") + .contentType(TEXT_URI_LIST) + .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() + + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) + .andExpect(status().is(422)); + } + @Test public void patchBitstreamMetadataAuthorized() throws Exception { runPatchMetadataTests(admin, 200); From 464465560187002f0d50dbd0f6a9f12044a42723 Mon Sep 17 00:00:00 2001 From: jensroets Date: Wed, 14 Sep 2022 15:49:03 +0200 Subject: [PATCH 002/125] 94299 Multiple Bitstream deletion endpoint: rename items to parents --- .../dspace/app/rest/repository/BitstreamRestRepository.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index f599d993be..3696b38668 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -161,7 +161,7 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository items = new ArrayList<>(); + List parents = new ArrayList<>(); for (DSpaceObject dso : dsoList) { Bitstream bit = bs.find(context, dso.getID()); DSpaceObject bitstreamParent = bs.getParentObject(context, bit); @@ -173,10 +173,10 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository 1) { + if (parents.stream().distinct().count() > 1) { throw new UnprocessableEntityException("Not all given items are part of the same Item."); } // delete all Bitstreams From 8e2ada65b191d55bc86002bef10e2a4707cb4d2a Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 6 Dec 2022 12:36:34 +0100 Subject: [PATCH 003/125] 97248: Fix File info Solr plugin to allow faceting --- .../org/dspace/discovery/SolrServiceFileInfoPlugin.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 52e0043ff4..c53b48f80f 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,10 +53,14 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if 
((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName()); } } } @@ -65,4 +69,4 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { } } } -} \ No newline at end of file +} From 3e651af7605853b013fe52607b0701f797090a28 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 6 Dec 2022 12:37:21 +0100 Subject: [PATCH 004/125] 97248: Find DSO based configurations recursively through parent objects --- .../org/dspace/discovery/SearchUtils.java | 45 ++++++++++++----- .../DiscoveryConfigurationService.java | 49 +++++++++++++++++-- .../CollectionIndexFactoryImpl.java | 4 +- .../CommunityIndexFactoryImpl.java | 4 +- .../InprogressSubmissionIndexFactoryImpl.java | 6 +-- .../indexobject/ItemIndexFactoryImpl.java | 2 +- .../repository/DiscoveryRestRepository.java | 10 ++-- 7 files changed, 89 insertions(+), 31 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 90afb09eca..83cbdeaef6 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,7 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; import org.dspace.kernel.ServiceManager; @@ -60,28 +61,32 @@ public class SearchUtils { } public static DiscoveryConfiguration getDiscoveryConfiguration() { - return getDiscoveryConfiguration(null, null); + return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(DSpaceObject dso) { - return getDiscoveryConfiguration(null, dso); + public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, + DSpaceObject dso) { + return getDiscoveryConfiguration(context, null, dso); } /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow * + * + * @param context * @param prefix * the namespace of the configuration to lookup if any * @param dso * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(String prefix, DSpaceObject dso) { + public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, String prefix, + DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); } else { - return getDiscoveryConfigurationByName(dso != null ? 
dso.getHandle() : null); + return getDiscoveryConfigurationByDSO(context, dso); } } @@ -98,6 +103,11 @@ public class SearchUtils { return configurationService.getDiscoveryConfiguration(configurationName); } + public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( + Context context, DSpaceObject dso) { + DiscoveryConfigurationService configurationService = getConfigurationService(); + return configurationService.getDiscoveryDSOConfiguration(context, dso); + } public static DiscoveryConfigurationService getConfigurationService() { ServiceManager manager = DSpaceServicesFactory.getInstance().getServiceManager(); @@ -114,45 +124,54 @@ public class SearchUtils { * A configuration object can be returned for each parent community/collection * * @param item the DSpace item + * @param context * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item) throws SQLException { + public static List getAllDiscoveryConfigurations(Item item, + final Context context) throws SQLException { List collections = item.getCollections(); - return getAllDiscoveryConfigurations(null, collections, item); + return getAllDiscoveryConfigurations(context, null, collections, item); } /** * Return all the discovery configuration applicable to the provided workspace item + * + * @param context * @param witem a workspace item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkspaceItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkspaceItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workspace", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workspace", collections, witem.getItem()); } /** * Return all the discovery configuration applicable to the provided workflow item + * + * @param context * @param witem a workflow item * @return a list of discovery configuration * @throws SQLException */ - public static List getAllDiscoveryConfigurations(WorkflowItem witem) throws SQLException { + public static List getAllDiscoveryConfigurations(final Context context, + WorkflowItem witem) throws SQLException { List collections = new ArrayList(); collections.add(witem.getCollection()); - return getAllDiscoveryConfigurations("workflow", collections, witem.getItem()); + return getAllDiscoveryConfigurations(context, "workflow", collections, witem.getItem()); } - private static List getAllDiscoveryConfigurations(String prefix, + private static List getAllDiscoveryConfigurations(final Context context, + String prefix, List collections, Item item) throws SQLException { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(prefix, collection); + DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection); result.add(configuration); } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 636e7ccd2a..b00ff73563 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ 
b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -7,12 +7,20 @@ */ package org.dspace.discovery.configuration; +import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; +import org.dspace.core.Context; +import org.dspace.core.ReloadableEntity; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -22,6 +30,8 @@ import org.dspace.services.factory.DSpaceServicesFactory; */ public class DiscoveryConfigurationService { + private static final Logger log = LogManager.getLogger(); + private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); @@ -41,25 +51,53 @@ public class DiscoveryConfigurationService { this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(IndexableObject dso) { + public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, + IndexableObject dso) { String name; if (dso == null) { name = "default"; } else if (dso instanceof IndexableDSpaceObject) { - name = ((IndexableDSpaceObject) dso).getIndexedObject().getHandle(); + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) dso).getIndexedObject()); } else { name = dso.getUniqueIndexID(); } - return getDiscoveryConfiguration(name); } + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, + DSpaceObject dso) { + String name; + if (dso == null) { + name = "default"; + } else { + name = dso.getHandle(); + } + + DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false); + if (configuration != null) { + return configuration; + } + DSpaceObjectService dSpaceObjectService = + ContentServiceFactory.getInstance().getDSpaceObjectService(dso); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + return getDiscoveryDSOConfiguration(context, parentObject); + } + public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + return getDiscoveryConfiguration(name, true); + } + + public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; result = StringUtils.isBlank(name) ? 
null : getMap().get(name); - if (result == null) { + if (result == null && useDefault) { //No specific configuration, get the default one result = getMap().get("default"); } @@ -68,11 +106,12 @@ public class DiscoveryConfigurationService { } public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, + final Context context, final IndexableObject dso) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(dso); + return getDiscoveryConfiguration(context, dso); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java index c2bacfe502..817be7848d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/CollectionIndexFactoryImpl.java @@ -86,7 +86,7 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -173,4 +173,4 @@ public class CollectionIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl highlightedMetadataFields = new ArrayList<>(); @@ -135,4 +135,4 @@ public class CommunityIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations((WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index e9f18ae949..b417237f76 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -147,7 +147,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index 52224ef579..1962d44162 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, scopeObject); + .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; From 82bc777e45dce2525e2754fc338d27e7630bad1d Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 12:32:15 +0100 Subject: [PATCH 005/125] Fix issue with indexing and add tests --- .../org/dspace/discovery/SearchUtils.java | 32 +- .../discovery/SolrServiceFileInfoPlugin.java | 6 +- .../DiscoveryConfigurationService.java | 9 +- .../InprogressSubmissionIndexFactoryImpl.java | 6 +- .../org/dspace/builder/CommunityBuilder.java | 24 +- .../config/spring/api/discovery.xml | 3198 +++++++++++++++++ .../DiscoveryScopeBasedRestControllerIT.java | 595 +++ .../app/rest/matcher/FacetEntryMatcher.java | 11 + .../app/rest/matcher/FacetValueMatcher.java | 10 + machine.cfg | 19 + 10 files changed, 3889 insertions(+), 21 deletions(-) create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml create mode 100644 
dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java create mode 100644 machine.cfg diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 83cbdeaef6..4085e1bbdf 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -18,6 +18,8 @@ import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; import org.dspace.content.Item; import org.dspace.content.WorkspaceItem; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; import org.dspace.discovery.configuration.DiscoveryConfiguration; import org.dspace.discovery.configuration.DiscoveryConfigurationService; @@ -72,7 +74,7 @@ public class SearchUtils { /** * Return the discovery configuration to use in a specific scope for the king of search identified by the prefix. A * null prefix mean the normal query, other predefined values are workspace or workflow - * + * * * @param context * @param prefix @@ -90,9 +92,28 @@ public class SearchUtils { } } + public static Set addDiscoveryConfigurationForParents( + Context context, Set configurations, String prefix, DSpaceObject dso) + throws SQLException { + if (dso == null) { + configurations.add(getDiscoveryConfigurationByName(null)); + return configurations; + } + if (prefix != null) { + configurations.add(getDiscoveryConfigurationByName(prefix + "." + dso.getHandle())); + } else { + configurations.add(getDiscoveryConfigurationByName(dso.getHandle())); + } + + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance() + .getDSpaceObjectService(dso); + DSpaceObject parentObject = dSpaceObjectService.getParentObject(context, dso); + return addDiscoveryConfigurationForParents(context, configurations, prefix, parentObject); + } + /** * Return the discovery configuration identified by the specified name - * + * * @param configurationName the configuration name assigned to the bean in the * discovery.xml * @return the discovery configuration @@ -128,8 +149,8 @@ public class SearchUtils { * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. 
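A minimal, self-contained sketch of the ancestor walk that addDiscoveryConfigurationForParents performs may help here. It is illustrative only: the Node type and the handles below stand in for DSpace's DSpaceObject hierarchy and for the bean names in discovery.xml; they are not real DSpace API.

import java.util.LinkedHashSet;
import java.util.Set;

public class ParentWalkSketch {

    /** Stand-in for a DSpaceObject: just a handle and a parent pointer. */
    static final class Node {
        final String handle;
        final Node parent;

        Node(String handle, Node parent) {
            this.handle = handle;
            this.parent = parent;
        }
    }

    /**
     * Collect one configuration name per level, mirroring the recursion above:
     * "prefix.handle" when a prefix such as "workspace" or "workflow" is given,
     * the plain handle otherwise, and "default" once the root has been passed.
     */
    static Set<String> collectConfigurationNames(Node dso, String prefix) {
        Set<String> names = new LinkedHashSet<>();
        for (Node current = dso; current != null; current = current.parent) {
            names.add(prefix != null ? prefix + "." + current.handle : current.handle);
        }
        names.add("default");
        return names;
    }

    public static void main(String[] args) {
        Node parentCommunity = new Node("123456789/discovery-parent-community-1", null);
        Node subCommunity = new Node("123456789/discovery-sub-community-1-1", parentCommunity);
        Node collection = new Node("123456789/discovery-collection-1-1-1", subCommunity);
        // Prints the collection's own name first, then its ancestors', then "default":
        // [123456789/discovery-collection-1-1-1, 123456789/discovery-sub-community-1-1, 123456789/discovery-parent-community-1, default]
        System.out.println(collectConfigurationNames(collection, null));
    }
}

In the real method the set holds DiscoveryConfiguration beans rather than names, and duplicate configurations collapse through set equality; the sketch keeps plain names only so it can run on its own.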
*/ - public static List getAllDiscoveryConfigurations(Item item, - final Context context) throws SQLException { + public static List getAllDiscoveryConfigurations(Item item, Context context) + throws SQLException { List collections = item.getCollections(); return getAllDiscoveryConfigurations(context, null, collections, item); } @@ -171,8 +192,7 @@ public class SearchUtils { Set result = new HashSet<>(); for (Collection collection : collections) { - DiscoveryConfiguration configuration = getDiscoveryConfiguration(context, prefix, collection); - result.add(configuration); + addDiscoveryConfigurationForParents(context, result, prefix, collection); } //Add alwaysIndex configurations diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index c53b48f80f..6bda2fc52d 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -59,8 +59,10 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { String description = bitstream.getDescription(); if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); - document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", bitstream.getName()); - document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", + bitstream.getName()); + document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", + bitstream.getName()); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index b00ff73563..22443aec22 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -20,7 +20,6 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; import org.dspace.core.Context; -import org.dspace.core.ReloadableEntity; import org.dspace.discovery.IndexableObject; import org.dspace.discovery.indexobject.IndexableDSpaceObject; import org.dspace.services.factory.DSpaceServicesFactory; @@ -135,9 +134,9 @@ public class DiscoveryConfigurationService { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() .getServiceByName( - DiscoveryConfigurationService.class - .getName(), - DiscoveryConfigurationService.class); + DiscoveryConfigurationService.class + .getName(), + DiscoveryConfigurationService.class); for (String key : mainService.getMap().keySet()) { System.out.println(key); @@ -165,7 +164,7 @@ public class DiscoveryConfigurationService { System.out.println("Recent submissions configuration:"); DiscoveryRecentSubmissionsConfiguration recentSubmissionConfiguration = discoveryConfiguration - .getRecentSubmissionConfiguration(); + .getRecentSubmissionConfiguration(); System.out.println("\tMetadata sort field: " + recentSubmissionConfiguration.getMetadataSortField()); System.out.println("\tMax recent submissions: " + 
recentSubmissionConfiguration.getMax()); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index c3629b6362..ebedfc34b7 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -73,9 +73,11 @@ public abstract class InprogressSubmissionIndexFactoryImpl // Add item metadata List discoveryConfigurations; if (inProgressSubmission instanceof WorkflowItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkflowItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkflowItem) inProgressSubmission); } else if (inProgressSubmission instanceof WorkspaceItem) { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, + (WorkspaceItem) inProgressSubmission); } else { discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); } diff --git a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java index 5ba36af8f4..1f0e8fbd66 100644 --- a/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/CommunityBuilder.java @@ -32,27 +32,38 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { private Community community; + protected CommunityBuilder(Context context) { super(context); } public static CommunityBuilder createCommunity(final Context context) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.create(); + return builder.create(null); + } + public static CommunityBuilder createCommunity(final Context context, String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.create(handle); } - private CommunityBuilder create() { - return createSubCommunity(context, null); + private CommunityBuilder create(String handle) { + return createSubCommunity(context, null, handle); } public static CommunityBuilder createSubCommunity(final Context context, final Community parent) { CommunityBuilder builder = new CommunityBuilder(context); - return builder.createSub(parent); + return builder.createSub(parent, null); } - private CommunityBuilder createSub(final Community parent) { + public static CommunityBuilder createSubCommunity(final Context context, final Community parent, + final String handle) { + CommunityBuilder builder = new CommunityBuilder(context); + return builder.createSub(parent, handle); + } + + private CommunityBuilder createSub(final Community parent, String handle) { try { - community = communityService.create(parent, context); + community = communityService.create(parent, context, handle); } catch (Exception e) { e.printStackTrace(); return null; @@ -102,6 +113,7 @@ public class CommunityBuilder extends AbstractDSpaceObjectBuilder { @Override public Community build() { try { + communityService.update(context, community); context.dispatchEvents(); diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml 
b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
new file mode 100644
index 0000000000..6ffcbe661c
--- /dev/null
+++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml
@@ -0,0 +1,3198 @@
+[3,198 added lines of Spring bean XML whose markup was lost in text extraction; the surviving text nodes show the standard test Discovery configurations (default, workspace/workflow and entity-type configurations filtering on search.resourcetype and entityType_keyword) together with scope-specific configurations for the handles used in DiscoveryScopeBasedRestControllerIT, each exposing a facet over one of the dc.test.* fields: parentcommunity1field, subcommunity11field, collection111field, collection121field, subcommunity21field, collection211field, collection221field.]
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
new file mode 100644
index 0000000000..a0edf1a0c7
--- /dev/null
+++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java
@@ -0,0 +1,595 @@
+/**
+ * The contents of this file are subject to the license and copyright
+ * detailed in the LICENSE and NOTICE files at the root of the source
+ * tree and available online at
+ *
+ * http://www.dspace.org/license/
+ */
+package org.dspace.app.rest;
+
+import static org.hamcrest.Matchers.containsInAnyOrder;
+import static 
org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.FacetEntryMatcher; +import org.dspace.app.rest.matcher.FacetValueMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ItemBuilder; +import org.dspace.builder.MetadataFieldBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.dspace.content.Item; +import org.dspace.content.service.CollectionService; +import org.junit.Before; +import org.junit.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { + + @Autowired + CollectionService collectionService; + + private Community community1; + private Community subcommunity11; + private Community subcommunity12; + private Collection collection111; + private Collection collection112; + private Collection collection121; + private Collection collection122; + + private Community community2; + private Community subcommunity21; + private Community subcommunity22; + private Collection collection211; + private Collection collection212; + private Collection collection221; + private Collection collection222; + + @Before + public void setUp() throws Exception { + super.setUp(); + + context.turnOffAuthorisationSystem(); + + MetadataFieldBuilder.createMetadataField(context, "test", "parentcommunity1field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity11field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection111field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection121field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "subcommunity21field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); + MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); + + community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); + subcommunity11 = CommunityBuilder + .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1") + .build(); + subcommunity12 = CommunityBuilder + .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2") + .build(); + collection111 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") + .build(); + collection112 = CollectionBuilder + .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-2") + .build(); + collection121 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-1") + .build(); + + collection122 = CollectionBuilder + .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") + .build(); + + community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); + + + subcommunity21 = CommunityBuilder + .createSubCommunity(context, community2, 
"123456789/discovery-sub-community-2-1") + .build(); + subcommunity22 = CommunityBuilder + .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2") + .build(); + collection211 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") + .build(); + collection212 = CollectionBuilder + .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-2") + .build(); + collection221 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-1") + .build(); + collection222 = CollectionBuilder + .createCollection(context, subcommunity22, "123456789/discovery-collection-2-2-2") + .build(); + + + Item item111 = ItemBuilder.createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-item111") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item111") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item111") + .withMetadata("dc", "test", "collection111field", "collection111field-item111") + .withMetadata("dc", "test", "collection121field", "collection121field-item111") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item111") + .withMetadata("dc", "test", "collection211field", "collection211field-item111") + .withMetadata("dc", "test", "collection221field", "collection221field-item111") + .build(); + + Item item112 = ItemBuilder.createItem(context, collection112) + .withMetadata("dc", "contributor", "author", "author-item112") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item112") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item112") + .withMetadata("dc", "test", "collection111field", "collection111field-item112") + .withMetadata("dc", "test", "collection121field", "collection121field-item112") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item112") + .withMetadata("dc", "test", "collection211field", "collection211field-item112") + .withMetadata("dc", "test", "collection221field", "collection221field-item112") + .build(); + + Item item121 = ItemBuilder.createItem(context, collection121) + .withMetadata("dc", "contributor", "author", "author-item121") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item121") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item121") + .withMetadata("dc", "test", "collection111field", "collection111field-item121") + .withMetadata("dc", "test", "collection121field", "collection121field-item121") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item121") + .withMetadata("dc", "test", "collection211field", "collection211field-item121") + .withMetadata("dc", "test", "collection221field", "collection221field-item121") + .build(); + + Item item122 = ItemBuilder.createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-item122") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item122") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item122") + .withMetadata("dc", "test", "collection111field", "collection111field-item122") + .withMetadata("dc", "test", "collection121field", "collection121field-item122") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item122") + .withMetadata("dc", "test", "collection211field", "collection211field-item122") + .withMetadata("dc", 
"test", "collection221field", "collection221field-item122") + .build(); + + Item item211 = ItemBuilder.createItem(context, collection211) + .withMetadata("dc", "contributor", "author", "author-item211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item211") + .withMetadata("dc", "test", "collection111field", "collection111field-item211") + .withMetadata("dc", "test", "collection121field", "collection121field-item211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item211") + .withMetadata("dc", "test", "collection211field", "collection211field-item211") + .withMetadata("dc", "test", "collection221field", "collection221field-item211") + .build(); + + Item item212 = ItemBuilder.createItem(context, collection212) + .withMetadata("dc", "contributor", "author", "author-item212") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item212") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item212") + .withMetadata("dc", "test", "collection111field", "collection111field-item212") + .withMetadata("dc", "test", "collection121field", "collection121field-item212") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item212") + .withMetadata("dc", "test", "collection211field", "collection211field-item212") + .withMetadata("dc", "test", "collection221field", "collection221field-item212") + .build(); + + Item item221 = ItemBuilder.createItem(context, collection221) + .withMetadata("dc", "contributor", "author", "author-item221") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item221") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item221") + .withMetadata("dc", "test", "collection111field", "collection111field-item221") + .withMetadata("dc", "test", "collection121field", "collection121field-item221") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item221") + .withMetadata("dc", "test", "collection211field", "collection211field-item221") + .withMetadata("dc", "test", "collection221field", "collection221field-item221") + .build(); + + Item item222 = ItemBuilder.createItem(context, collection222) + .withMetadata("dc", "contributor", "author", "author-item222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-item222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-item222") + .withMetadata("dc", "test", "collection111field", "collection111field-item222") + .withMetadata("dc", "test", "collection121field", "collection121field-item222") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-item222") + .withMetadata("dc", "test", "collection211field", "collection211field-item222") + .withMetadata("dc", "test", "collection221field", "collection221field-item222") + .build(); + + Item mappedItem111222 = ItemBuilder + .createItem(context, collection111) + .withMetadata("dc", "contributor", "author", "author-mappedItem111222") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem111222") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem111222") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem111222") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem111222") + .withMetadata("dc", "test", 
"subcommunity21field", "subcommunity21field-mappedItem111222") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem111222") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem111222") + .build(); + + + Item mappedItem122211 = ItemBuilder + .createItem(context, collection122) + .withMetadata("dc", "contributor", "author", "author-mappedItem122211") + .withMetadata("dc", "test", "parentcommunity1field", "parentcommunity1field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity11field", "subcommunity11field-mappedItem122211") + .withMetadata("dc", "test", "collection111field", "collection111field-mappedItem122211") + .withMetadata("dc", "test", "collection121field", "collection121field-mappedItem122211") + .withMetadata("dc", "test", "subcommunity21field", "subcommunity21field-mappedItem122211") + .withMetadata("dc", "test", "collection211field", "collection211field-mappedItem122211") + .withMetadata("dc", "test", "collection221field", "collection221field-mappedItem122211") + .build(); + + + collectionService.addItem(context, collection222, mappedItem111222); + collectionService.addItem(context, collection211, mappedItem122211); + + + context.dispatchEvents(); + context.restoreAuthSystemState(); + } + + @Test + public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(community1.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item111", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item112", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem111222", + 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + + + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(subcommunity11.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + 
FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item111", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1), + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-mappedItem111222", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection111field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection111field") + .param("scope", String.valueOf(collection111.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection111field", + "collection111field-item111", 1), + FacetValueMatcher.matchEntry("collection111field", + "collection111field-mappedItem111222", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity11field") + .param("scope", String.valueOf(collection112.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity11field", + "subcommunity11field-item112", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(subcommunity12.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item121", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", 
String.valueOf(collection121.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection121field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection121field") + .param("scope", String.valueOf(collection121.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection121field", + "collection121field-item121", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/parentcommunity1field") + .param("scope", String.valueOf(collection122.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-item122", 1), + FacetValueMatcher.matchEntry("parentcommunity1field", + "parentcommunity1field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(subcommunity21.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item211", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1), + FacetValueMatcher.matchEntry("subcommunity21field", + 
"subcommunity21field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection211field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection211field") + .param("scope", String.valueOf(collection211.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection211field", + "collection211field-item211", 1), + FacetValueMatcher.matchEntry("collection211field", + "collection211field-mappedItem122211", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/subcommunity21field") + .param("scope", String.valueOf(collection212.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("subcommunity21field", + "subcommunity21field-item212", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.matchFacet("collection221field", "text", false))) + ); + + getClient().perform(get("/api/discover/facets/collection221field") + .param("scope", String.valueOf(collection221.getID()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._embedded.values", + containsInAnyOrder( + FacetValueMatcher.matchEntry("collection221field", + 
"collection221field-item221", 1) + ) + )); + } + + @Test + public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { + + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) + + .andExpect(status().isOk()) + .andExpect(jsonPath("$.type", is("discover"))) + .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) + .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( + FacetEntryMatcher.authorFacet(false), + FacetEntryMatcher.subjectFacet(false), + FacetEntryMatcher.dateIssuedFacet(false), + FacetEntryMatcher.hasContentInOriginalBundleFacet(false), + FacetEntryMatcher.entityTypeFacet(false) + )) + ); + } + + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 5e3c477506..34b7b8b30d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -99,6 +99,17 @@ public class FacetEntryMatcher { ); } + public static Matcher matchFacet(String name, String facetType, boolean hasNext) { + return allOf( + hasJsonPath("$.name", is(name)), + hasJsonPath("$.facetType", is(facetType)), + hasJsonPath("$.facetLimit", any(Integer.class)), + hasJsonPath("$._links.self.href", containsString("api/discover/facets/" + name)), + hasJsonPath("$._links", matchNextLink(hasNext, "api/discover/facets/" + name)) + ); + } + + /** * Check that a facet over the dc.type exists and match the default configuration * diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java index a68356da53..1efafb5406 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetValueMatcher.java @@ -52,6 +52,16 @@ public class FacetValueMatcher { ); } + public static Matcher matchEntry(String facet, String label, int count) { + return allOf( + hasJsonPath("$.label", is(label)), + hasJsonPath("$.type", is("discover")), + hasJsonPath("$.count", is(count)), + hasJsonPath("$._links.search.href", containsString("api/discover/search/objects")), + hasJsonPath("$._links.search.href", containsString("f." 
+ facet + "=" + label + ",equals")) + ); + } + public static Matcher entrySubject(String label, String authority, int count) { return allOf( diff --git a/machine.cfg b/machine.cfg new file mode 100644 index 0000000000..14f0d1d0b0 --- /dev/null +++ b/machine.cfg @@ -0,0 +1,19 @@ +dspace.shortname = or-platform-7 + +dspace.dir=/Users/yana/dspaces/or-platform-7 + +dspace.server.url =http://localhost:8080/server-or7 +dspace.ui.url = http://localhost:4000 + +# URL for connecting to database +# * Postgres template: jdbc:postgrook naar de toekomst toe wilt dat zeggen dat de backend gewoon in orde is en mogelijk enkel nog eesql://localhost:5432/dspace +# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe +#db.url = ${db.url} +#db.url = jdbc:postgresql://localhost:5432/or-platform-7 +db.url = jdbc:postgresql://localhost:5434/or-platform-7-4 + + + +solr.server = http://localhost:8983/solr + + From c538b9cbedd2d7ab7ab88b912a5eeb75a180e10d Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 14:27:29 +0100 Subject: [PATCH 006/125] Add docs and remove unused site configuration --- .../org/dspace/discovery/SearchUtils.java | 19 ++- .../DiscoveryConfigurationService.java | 15 ++ .../config/spring/api/discovery.xml | 133 +----------------- .../DiscoveryScopeBasedRestControllerIT.java | 56 ++++++-- dspace/config/spring/api/discovery.xml | 129 ----------------- 5 files changed, 77 insertions(+), 275 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 4085e1bbdf..418720be4a 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -77,6 +77,7 @@ public class SearchUtils { * * * @param context + * the database context * @param prefix * the namespace of the configuration to lookup if any * @param dso @@ -92,6 +93,15 @@ public class SearchUtils { } } + /** + * Retrieve the configuration for the current dspace object and all its parents and add it to the provided set + * @param context - The database context + * @param configurations - The set of configurations to add the retrieved configurations to + * @param prefix - The namespace of the configuration to lookup if any + * @param dso - The DSpace Object + * @return the set of configurations with additional retrieved ones for the dspace object and parents + * @throws SQLException + */ public static Set addDiscoveryConfigurationForParents( Context context, Set configurations, String prefix, DSpaceObject dso) throws SQLException { @@ -124,6 +134,13 @@ public class SearchUtils { return configurationService.getDiscoveryConfiguration(configurationName); } + + /** + * Return the discovery configuration for the provided DSO + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO + */ public static DiscoveryConfiguration getDiscoveryConfigurationByDSO( Context context, DSpaceObject dso) { DiscoveryConfigurationService configurationService = getConfigurationService(); @@ -145,7 +162,7 @@ public class SearchUtils { * A configuration object can be returned for each parent community/collection * * @param item the DSpace item - * @param context + * @param context the database context * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. 
*/ diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 22443aec22..c0eba58669 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -63,6 +63,13 @@ public class DiscoveryConfigurationService { return getDiscoveryConfiguration(name); } + /** + * Retrieve the discovery configuration for the provided DSO. When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param dso - The DSpace object to retrieve the configuration for + * @return the discovery configuration for the provided DSO. + */ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { String name; @@ -91,6 +98,14 @@ public class DiscoveryConfigurationService { return getDiscoveryConfiguration(name, true); } + /** + * Retrieve the configuration for the provided name. When useDefault is set to true, the "default" configuration + * will be returned when no match is found. When useDefault is set to false, null will be returned when no match is + * found. + * @param name - The name of the configuration to retrieve + * @param useDefault - Whether the default configuration should be used when no match is found + * @return the configuration for the provided name + */ public DiscoveryConfiguration getDiscoveryConfiguration(final String name, boolean useDefault) { DiscoveryConfiguration result; diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml index 6ffcbe661c..e029c65aa0 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml @@ -50,9 +50,6 @@ --> - - - @@ -77,6 +74,7 @@ + @@ -543,121 +541,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index a0edf1a0c7..15c1019584 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -29,12 +29,42 @@ import org.junit.Before; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +/** + * This class tests the correct inheritance of Discovery configurations for sub communities and collections. + * To thoroughly test this, a community and collection structure is set up to where different communities have custom + * configurations configured for them. 
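Read together with the assertions further down in this test class, that structure implies a concrete expectation per search scope. The helper below is a hypothetical summary, not part of the committed test; the values are taken directly from the test methods that follow.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical overview (not part of the committed test class): which facet each
// search scope is expected to expose, as asserted by the test methods below.
class ExpectedScopeFacets {

    static Map<String, String> expectedCustomFacet() {
        Map<String, String> expected = new LinkedHashMap<>();
        expected.put("parentCommunity1", "parentcommunity1field");
        expected.put("subcommunity11", "subcommunity11field");
        expected.put("collection111", "collection111field");
        expected.put("collection112", "subcommunity11field");    // no own configuration: inherits from subcommunity 11
        expected.put("subcommunity12", "parentcommunity1field");  // inherits from parent community 1
        expected.put("collection121", "collection121field");
        expected.put("collection122", "parentcommunity1field");   // inherits from parent community 1
        expected.put("parentCommunity2", "default facets");       // nothing custom anywhere up the chain
        expected.put("subcommunity21", "subcommunity21field");
        expected.put("collection211", "collection211field");
        expected.put("collection212", "subcommunity21field");     // inherits from subcommunity 21
        expected.put("subcommunity22", "default facets");
        expected.put("collection221", "collection221field");
        expected.put("collection222", "default facets");
        return expected;
    }
}

Scopes without a custom configuration fall back to the nearest configured ancestor, and ultimately to the default configuration, which is exactly what the facet assertions below verify.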
+ * + * The following structure is uses: + * - Parent Community 1 - Custom configuration: discovery-parent-community-1 + * -- Subcommunity 11 - Custom configuration: discovery-sub-community-1-1 + * -- Collection 111 - Custom configuration: discovery-collection-1-1-1 + * -- Collection 112 + * -- Subcommunity 12 + * -- Collection 121 - Custom configuration: discovery-collection-1-2-1 + * -- Collection 122 + * - Parent Community 2 + * -- Subcommunity 21 - Custom configuration: discovery-sub-community-2-1 + * -- Collection 211 - Custom configuration: discovery-collection-2-1-1 + * -- Collection 212 + * -- Subcommunity 22 + * -- Collection 221 - Custom configuration: discovery-collection-2-2-1 + * -- Collection 222 + * + * Each custom configuration contains a unique index for a unique metadata field, to verify if correct information is + * indexed and provided for the different search scopes. + * + * Each collection has an item in it. Next to these items, there are two mapped items, one in collection 111 and 222, + * and one in collection 122 and 211. + * + * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to + * power these facets are indexed properly. + */ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { @Autowired CollectionService collectionService; - private Community community1; + private Community parentCommunity1; private Community subcommunity11; private Community subcommunity12; private Collection collection111; @@ -42,7 +72,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg private Collection collection121; private Collection collection122; - private Community community2; + private Community parentCommunity2; private Community subcommunity21; private Community subcommunity22; private Collection collection211; @@ -64,13 +94,13 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg MetadataFieldBuilder.createMetadataField(context, "test", "collection211field", "").build(); MetadataFieldBuilder.createMetadataField(context, "test", "collection221field", "").build(); - community1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") - .build(); + parentCommunity1 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-1") + .build(); subcommunity11 = CommunityBuilder - .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-1") + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-1") .build(); subcommunity12 = CommunityBuilder - .createSubCommunity(context, community1, "123456789/discovery-sub-community-1-2") + .createSubCommunity(context, parentCommunity1, "123456789/discovery-sub-community-1-2") .build(); collection111 = CollectionBuilder .createCollection(context, subcommunity11, "123456789/discovery-collection-1-1-1") @@ -86,15 +116,15 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .createCollection(context, subcommunity12, "123456789/discovery-collection-1-2-2") .build(); - community2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") - .build(); + parentCommunity2 = CommunityBuilder.createCommunity(context, "123456789/discovery-parent-community-2") + .build(); subcommunity21 = CommunityBuilder - .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-1") + .createSubCommunity(context, parentCommunity2, 
"123456789/discovery-sub-community-2-1") .build(); subcommunity22 = CommunityBuilder - .createSubCommunity(context, community2, "123456789/discovery-sub-community-2-2") + .createSubCommunity(context, parentCommunity2, "123456789/discovery-sub-community-2-2") .build(); collection211 = CollectionBuilder .createCollection(context, subcommunity21, "123456789/discovery-collection-2-1-1") @@ -235,7 +265,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg @Test public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { - getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community1.getID()))) + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) @@ -246,7 +276,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg ); getClient().perform(get("/api/discover/facets/parentcommunity1field") - .param("scope", String.valueOf(community1.getID()))) + .param("scope", String.valueOf(parentCommunity1.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) .andExpect(jsonPath("$._embedded.values", @@ -435,7 +465,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg @Test public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { - getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(community2.getID()))) + getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) .andExpect(status().isOk()) .andExpect(jsonPath("$.type", is("discover"))) diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 4392e02cb3..ae1992fbff 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -50,9 +50,6 @@ --> - - - @@ -534,120 +531,6 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From 463edac869855150b3bb1c6e2f31c8a97482a633 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 17:08:02 +0100 Subject: [PATCH 007/125] Remove local file --- machine.cfg | 19 ------------------- 1 file changed, 19 deletions(-) delete mode 100644 machine.cfg diff --git a/machine.cfg b/machine.cfg deleted file mode 100644 index 14f0d1d0b0..0000000000 --- a/machine.cfg +++ /dev/null @@ -1,19 +0,0 @@ -dspace.shortname = or-platform-7 - -dspace.dir=/Users/yana/dspaces/or-platform-7 - -dspace.server.url =http://localhost:8080/server-or7 -dspace.ui.url = http://localhost:4000 - -# URL for connecting to database -# * Postgres template: jdbc:postgrook naar de toekomst toe wilt dat zeggen dat de backend gewoon in orde is en mogelijk enkel nog eesql://localhost:5432/dspace -# * Oracle template: jdbc:oracle:thin:@//localhost:1521/xe -#db.url = ${db.url} -#db.url = jdbc:postgresql://localhost:5432/or-platform-7 -db.url = jdbc:postgresql://localhost:5434/or-platform-7-4 - - - -solr.server = http://localhost:8983/solr - - From 14534b4eafb8f5333440a624f07395b2cb2f14eb Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Tue, 13 Dec 2022 17:47:35 +0100 Subject: [PATCH 008/125] Move context to first argument in 
getDiscoveryConfigurationByNameOrDso --- .../configuration/DiscoveryConfigurationService.java | 4 ++-- .../app/rest/repository/DiscoveryRestRepository.java | 10 +++++----- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index c0eba58669..d7bc3b0f35 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -119,8 +119,8 @@ public class DiscoveryConfigurationService { return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final String configurationName, - final Context context, + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final Context context, + final String configurationName, final IndexableObject dso) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index 1962d44162..e337e76ef2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); return discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverQuery discoverQuery = 
queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(configuration, context, scopeObject); + .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; From 38b30c394c982c4760a8afc9676bfbe139de5e10 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Wed, 14 Dec 2022 10:32:54 +0100 Subject: [PATCH 009/125] Fix openSearchController issue --- .../src/main/java/org/dspace/app/rest/OpenSearchController.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java index 79ca381753..665504139c 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/OpenSearchController.java @@ -176,7 +176,7 @@ public class OpenSearchController { if (dsoObject != null) { container = scopeResolver.resolveScope(context, dsoObject); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso("site", container); + .getDiscoveryConfiguration(context, container); queryArgs.setDiscoveryConfigurationName(discoveryConfiguration.getId()); queryArgs.addFilterQueries(discoveryConfiguration.getDefaultFilterQueries() .toArray( From 69500ad5d579f6891bbf35c35e29b18f120b20e9 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 15 Dec 2022 11:55:05 +0100 Subject: [PATCH 010/125] Fix discovery test config and make ids for relationship profiles unique --- .../config/spring/api/discovery.xml | 733 +++++++++++++++++- dspace/config/spring/api/discovery.xml | 14 +- 2 files changed, 701 insertions(+), 46 deletions(-) diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml index e029c65aa0..a5d7682d4c 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml @@ -48,12 +48,15 @@ the key is used to refer to the page (the "site" or a community/collection handle) the value-ref is a reference to an identifier of the DiscoveryConfiguration format --> - - - - - - + + + + + + + + + @@ -61,17 +64,48 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -176,7 +210,145 @@ + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + -withdrawn:true AND -discoverable:false @@ -313,7 +485,7 @@ - search.resourcetype:Item + 
search.resourcetype:Item AND latestVersion:true withdrawn:true OR discoverable:false @@ -455,7 +627,7 @@ - search.resourcetype:Item + search.resourcetype:Item AND latestVersion:true @@ -541,10 +713,11 @@ + + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -579,7 +752,7 @@ - search.resourcetype:Item OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem + (search.resourcetype:Item AND latestVersion:true) OR search.resourcetype:WorkspaceItem OR search.resourcetype:XmlWorkflowItem @@ -616,8 +789,8 @@ + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -691,8 +864,8 @@ + class="org.dspace.discovery.configuration.DiscoveryConfiguration" + scope="prototype"> @@ -814,7 +987,79 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Publication + -withdrawn:true AND -discoverable:false @@ -875,7 +1120,71 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Person + -withdrawn:true AND -discoverable:false @@ -928,7 +1237,63 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Project + -withdrawn:true AND -discoverable:false @@ -990,7 +1355,73 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:OrgUnit + -withdrawn:true AND -discoverable:false @@ -1049,7 +1480,69 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalIssue + -withdrawn:true AND -discoverable:false @@ -1107,7 +1600,68 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:JournalVolume + -withdrawn:true AND -discoverable:false @@ -1165,7 +1719,68 @@ + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item AND entityType_keyword:Journal + -withdrawn:true AND -discoverable:false @@ -1238,7 +1853,8 @@ - search.resourcetype:Item AND (entityType_keyword:OrgUnit OR entityType_keyword:Person) + search.resourcetype:Item AND latestVersion:true AND (entityType_keyword:OrgUnit OR entityType_keyword:Person) + 
-withdrawn:true AND -discoverable:false @@ -1293,7 +1909,8 @@ - search.resourcetype:Item AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization + search.resourcetype:Item AND latestVersion:true AND entityType_keyword:OrgUnit AND dc.type:FundingOrganization + -withdrawn:true AND -discoverable:false @@ -1302,6 +1919,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item + search.entitytype:${researcher-profile.entity-type:Person} + -withdrawn:true AND -discoverable:false + + + + + + + + + @@ -2288,7 +2943,7 @@ - relation.isAuthorOfPublication + relation.isAuthorOfPublication.latestForDiscovery @@ -2299,7 +2954,7 @@ - relation.isProjectOfPublication + relation.isProjectOfPublication.latestForDiscovery @@ -2311,7 +2966,7 @@ - relation.isOrgUnitOfPublication + relation.isOrgUnitOfPublication.latestForDiscovery @@ -2322,7 +2977,7 @@ - relation.isPublicationOfJournalIssue + relation.isPublicationOfJournalIssue.latestForDiscovery @@ -2333,7 +2988,7 @@ - relation.isJournalOfPublication + relation.isJournalOfPublication.latestForDiscovery @@ -2539,7 +3194,7 @@ - relation.isOrgUnitOfPerson + relation.isOrgUnitOfPerson.latestForDiscovery @@ -2550,7 +3205,7 @@ - relation.isProjectOfPerson + relation.isProjectOfPerson.latestForDiscovery @@ -2562,7 +3217,7 @@ - relation.isPublicationOfAuthor + relation.isPublicationOfAuthor.latestForDiscovery @@ -2634,7 +3289,7 @@ - relation.isPersonOfOrgUnit + relation.isPersonOfOrgUnit.latestForDiscovery @@ -2645,7 +3300,7 @@ - relation.isProjectOfOrgUnit + relation.isProjectOfOrgUnit.latestForDiscovery @@ -2657,7 +3312,7 @@ - relation.isPublicationOfOrgUnit + relation.isPublicationOfOrgUnit.latestForDiscovery @@ -2711,7 +3366,7 @@ - relation.isPublicationOfJournalIssue + relation.isPublicationOfJournalIssue.latestForDiscovery @@ -2734,7 +3389,7 @@ - relation.isIssueOfJournalVolume + relation.isIssueOfJournalVolume.latestForDiscovery @@ -2745,7 +3400,7 @@ - relation.isJournalOfVolume + relation.isJournalOfVolume.latestForDiscovery @@ -2786,7 +3441,7 @@ - relation.isVolumeOfJournal + relation.isVolumeOfJournal.latestForDiscovery @@ -2811,7 +3466,7 @@ - relation.isOrgUnitOfProject + relation.isOrgUnitOfProject.latestForDiscovery @@ -2823,7 +3478,7 @@ - relation.isPersonOfProject + relation.isPersonOfProject.latestForDiscovery @@ -2835,7 +3490,7 @@ - relation.isPublicationOfProject + relation.isPublicationOfProject.latestForDiscovery @@ -2846,7 +3501,7 @@ - relation.isContributorOfPublication + relation.isContributorOfPublication.latestForDiscovery @@ -2857,7 +3512,7 @@ - relation.isPublicationOfContributor + relation.isPublicationOfContributor.latestForDiscovery @@ -2868,7 +3523,7 @@ - relation.isFundingAgencyOfProject + relation.isFundingAgencyOfProject.latestForDiscovery @@ -2879,7 +3534,7 @@ - relation.isProjectOfFundingAgency + relation.isProjectOfFundingAgency.latestForDiscovery diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 5e2cae5e9f..37d5f2548a 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -996,7 +996,7 @@ - + @@ -1129,7 +1129,7 @@ - + @@ -1246,7 +1246,7 @@ - + @@ -1366,7 +1366,7 @@ - + @@ -1491,7 +1491,7 @@ - + @@ -1611,7 +1611,7 @@ - + @@ -1730,7 +1730,7 @@ - + From 1300cdc75b25181fdeebda20661aaa02b2d92bfc Mon Sep 17 00:00:00 2001 From: Yury Bondarenko Date: Mon, 19 Dec 2022 11:20:53 +0100 Subject: [PATCH 011/125] 97248: Cache discovery configurations by UUID --- .../DiscoveryConfigurationService.java | 47 
+++++++++++++------ 1 file changed, 33 insertions(+), 14 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index d7bc3b0f35..7d5b435555 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -12,6 +12,7 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -34,6 +35,12 @@ public class DiscoveryConfigurationService { private Map map; private Map> toIgnoreMetadataFields = new HashMap<>(); + /** + * Discovery configurations, cached by DSO UUID. When a DSO doesn't have its own configuration, we take the one of + * the first parent that does. This cache ensures we don't have to go up the hierarchy every time. + */ + private final Map uuidMap = new HashMap<>(); + public Map getMap() { return map; } @@ -72,26 +79,38 @@ public class DiscoveryConfigurationService { */ public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { - String name; + // Fall back to default configuration if (dso == null) { - name = "default"; - } else { - name = dso.getHandle(); + return getDiscoveryConfiguration("default", false); } - DiscoveryConfiguration configuration = getDiscoveryConfiguration(name, false); - if (configuration != null) { - return configuration; + // Attempt to retrieve cached configuration by UUID + if (uuidMap.containsKey(dso.getID())) { + return uuidMap.get(dso.getID()); } - DSpaceObjectService dSpaceObjectService = + + DiscoveryConfiguration configuration; + + // Attempt to retrieve configuration by DSO handle + configuration = getDiscoveryConfiguration(dso.getHandle(), false); + + if (configuration == null) { + // Recurse up the Comm/Coll hierarchy until a configuration is found + DSpaceObjectService dSpaceObjectService = ContentServiceFactory.getInstance().getDSpaceObjectService(dso); - DSpaceObject parentObject = null; - try { - parentObject = dSpaceObjectService.getParentObject(context, dso); - } catch (SQLException e) { - log.error(e); + DSpaceObject parentObject = null; + try { + parentObject = dSpaceObjectService.getParentObject(context, dso); + } catch (SQLException e) { + log.error(e); + } + configuration = getDiscoveryDSOConfiguration(context, parentObject); } - return getDiscoveryDSOConfiguration(context, parentObject); + + // Cache the resulting configuration + uuidMap.put(dso.getID(), configuration); + + return configuration; } public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { From 9675a0e9b1277679d4010405461cc87c8b81771e Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Mon, 6 Mar 2023 11:41:14 -0500 Subject: [PATCH 012/125] Move DateMathParser from instance variables to locals, so "now" is current time not startup time. Clamp access condition dates to midnight UTC. Add a lot of debug logging and a test main(). 
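To illustrate the intent of this commit, the short sketch below (not part of the patch itself) constructs a DateMathParser locally, so that a configured limit such as "+36MONTHS" is evaluated against the current time rather than against the time the webapp started, and then clamps the result to midnight UTC with the TimeHelpers utility introduced further down in this same commit. The class name EmbargoLimitExample and the "+36MONTHS" expression are illustrative assumptions, not code from the patch.

    import java.util.Date;

    import org.dspace.util.DateMathParser;
    import org.dspace.util.TimeHelpers;

    public class EmbargoLimitExample {
        public static void main(String[] args) throws Exception {
            // Constructing the parser here (instead of keeping it in an instance field)
            // means its notion of "now" is the moment of this call, not application start-up.
            DateMathParser parser = new DateMathParser();

            // Evaluate a hypothetical embargo limit of three years from now...
            Date latestStartDate = parser.parseMath("+36MONTHS");

            // ...and clamp it to midnight UTC, as the validation code changed in this commit does.
            System.out.println("Latest allowed start date: "
                    + TimeHelpers.toMidnightUTC(latestStartDate));
        }
    }
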
--- .../submit/model/AccessConditionOption.java | 59 +++++++++++------ .../java/org/dspace/util/DateMathParser.java | 65 ++++++++++++++++--- .../java/org/dspace/util/TimeHelpers.java | 42 ++++++++++++ .../SubmissionAccessOptionConverter.java | 12 ++-- .../SubmissionUploadRestRepository.java | 17 +++-- .../AccessConditionAddPatchOperation.java | 12 +++- .../AccessConditionReplacePatchOperation.java | 18 ++--- 7 files changed, 171 insertions(+), 54 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/util/TimeHelpers.java diff --git a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java index dbbb7bbc5e..b29af4e773 100644 --- a/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java +++ b/dspace-api/src/main/java/org/dspace/submit/model/AccessConditionOption.java @@ -11,6 +11,8 @@ import java.text.ParseException; import java.util.Date; import java.util.Objects; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; @@ -21,6 +23,7 @@ import org.dspace.core.Context; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; /** @@ -28,9 +31,8 @@ import org.springframework.beans.factory.annotation.Autowired; * set permission on a file. An option is defined by a name such as "open * access", "embargo", "restricted access", etc. and some optional attributes to * better clarify the constraints and input available to the user. For instance - * an embargo option could allow to set a start date not longer than 3 years, - * etc - * + * an embargo option could allow to set a start date not longer than 3 years. + * * @author Luigi Andrea Pascarelli (luigiandrea.pascarelli at 4science.it) */ public class AccessConditionOption { @@ -44,9 +46,9 @@ public class AccessConditionOption { @Autowired private ResourcePolicyService resourcePolicyService; - DateMathParser dateMathParser = new DateMathParser(); + private static final Logger LOG = LogManager.getLogger(); - /** An unique name identifying the access contion option **/ + /** A unique name identifying the access condition option. **/ private String name; /** @@ -147,6 +149,9 @@ public class AccessConditionOption { * startDate should be null. Otherwise startDate may not be null. * @param endDate end date of the resource policy. If {@link #getHasEndDate()} returns false, * endDate should be null. Otherwise endDate may not be null. + * @throws SQLException passed through. + * @throws AuthorizeException passed through. + * @throws ParseException passed through (indicates problem with a date). 
*/ public void createResourcePolicy(Context context, DSpaceObject obj, String name, String description, Date startDate, Date endDate) @@ -160,7 +165,7 @@ public class AccessConditionOption { /** * Validate ResourcePolicy and after update it - * + * * @param context DSpace context * @param resourcePolicy ResourcePolicy to update * @throws SQLException If database error @@ -175,17 +180,25 @@ public class AccessConditionOption { } /** - * Validate the policy properties, throws exceptions if any is not valid - * - * @param context DSpace context - * @param name Name of the resource policy - * @param startDate Start date of the resource policy. If {@link #getHasStartDate()} - * returns false, startDate should be null. Otherwise startDate may not be null. - * @param endDate End date of the resource policy. If {@link #getHasEndDate()} - * returns false, endDate should be null. Otherwise endDate may not be null. + * Validate the policy properties, throws exceptions if any is not valid. + * + * @param context DSpace context. + * @param name Name of the resource policy. + * @param startDate Start date of the resource policy. If + * {@link #getHasStartDate()} returns false, startDate + * should be null. Otherwise startDate may not be null. + * @param endDate End date of the resource policy. If + * {@link #getHasEndDate()} returns false, endDate should + * be null. Otherwise endDate may not be null. + * @throws IllegalStateException if a date is required and absent, + * a date is not required and present, or a date exceeds its + * configured maximum. + * @throws ParseException passed through. */ private void validateResourcePolicy(Context context, String name, Date startDate, Date endDate) - throws SQLException, AuthorizeException, ParseException { + throws IllegalStateException, ParseException { + LOG.debug("Validate policy dates: name '{}', startDate {}, endDate {}", + name, startDate, endDate); if (getHasStartDate() && Objects.isNull(startDate)) { throw new IllegalStateException("The access condition " + getName() + " requires a start date."); } @@ -199,29 +212,33 @@ public class AccessConditionOption { throw new IllegalStateException("The access condition " + getName() + " cannot contain an end date."); } + DateMathParser dateMathParser = new DateMathParser(); + Date latestStartDate = null; if (Objects.nonNull(getStartDateLimit())) { - latestStartDate = dateMathParser.parseMath(getStartDateLimit()); + latestStartDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getStartDateLimit())); } Date latestEndDate = null; if (Objects.nonNull(getEndDateLimit())) { - latestEndDate = dateMathParser.parseMath(getEndDateLimit()); + latestEndDate = TimeHelpers.toMidnightUTC(dateMathParser.parseMath(getEndDateLimit())); } + LOG.debug(" latestStartDate {}, latestEndDate {}", + latestStartDate, latestEndDate); // throw if startDate after latestStartDate if (Objects.nonNull(startDate) && Objects.nonNull(latestStartDate) && startDate.after(latestStartDate)) { throw new IllegalStateException(String.format( - "The start date of access condition %s should be earlier than %s from now.", - getName(), getStartDateLimit() + "The start date of access condition %s should be earlier than %s from now (%s).", + getName(), getStartDateLimit(), dateMathParser.getNow() )); } // throw if endDate after latestEndDate if (Objects.nonNull(endDate) && Objects.nonNull(latestEndDate) && endDate.after(latestEndDate)) { throw new IllegalStateException(String.format( - "The end date of access condition %s should be earlier than %s from 
now.", - getName(), getEndDateLimit() + "The end date of access condition %s should be earlier than %s from now (%s).", + getName(), getEndDateLimit(), dateMathParser.getNow() )); } } diff --git a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java index 7c3e13a28e..9ff252e8ce 100644 --- a/dspace-api/src/main/java/org/dspace/util/DateMathParser.java +++ b/dspace-api/src/main/java/org/dspace/util/DateMathParser.java @@ -26,12 +26,15 @@ import java.util.Map; import java.util.TimeZone; import java.util.regex.Pattern; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; /** - * This class (Apache license) is copied from Apache Solr and add some tweaks to resolve unneeded dependency: - * https://raw.githubusercontent.com/apache/lucene-solr/releases/lucene-solr/7.1.0/solr/core/src/java/org/apache/solr - * /util/DateMathParser.java + * This class (Apache license) is copied from Apache Solr, adding some tweaks to + * resolve an unneeded dependency. See + * the original. * + *

* A Simple Utility class for parsing "math" like strings relating to Dates. * *

@@ -78,7 +81,7 @@ import java.util.regex.Pattern; * "setNow" in the interim). The default value of 'now' is * the time at the moment the DateMathParser instance is * constructed, unless overridden by the {@link CommonParams#NOW NOW} - * request param. + * request parameter. *

* *

@@ -88,7 +91,7 @@ import java.util.regex.Pattern; * cascades to rounding of HOUR, MIN, MONTH, YEAR as well. The default * TimeZone used is UTC unless overridden by the * {@link CommonParams#TZ TZ} - * request param. + * request parameter. *

* *

@@ -102,6 +105,8 @@ import java.util.regex.Pattern; */ public class DateMathParser { + private static final Logger LOG = LogManager.getLogger(); + public static final TimeZone UTC = TimeZone.getTimeZone("UTC"); /** @@ -119,12 +124,12 @@ public class DateMathParser { /** * A mapping from (uppercased) String labels identifying time units, - * to the corresponding {@link ChronoUnit} enum (e.g. "YEARS") used to + * to the corresponding {@link ChronoUnit} value (e.g. "YEARS") used to * set/add/roll that unit of measurement. * *

* A single logical unit of time might be represented by multiple labels - * for convenience (ie: DATE==DAYS, + * for convenience (i.e. DATE==DAYS, * MILLI==MILLIS) *

* @@ -220,6 +225,7 @@ public class DateMathParser { * * @param now an optional fixed date to use as "NOW" * @param val the string to parse + * @return result of applying the parsed expression to "NOW". * @throws Exception */ public static Date parseMath(Date now, String val) throws Exception { @@ -308,6 +314,7 @@ public class DateMathParser { /** * Defines this instance's concept of "now". * + * @param n new value of "now". * @see #getNow */ public void setNow(Date n) { @@ -316,12 +323,12 @@ public class DateMathParser { /** * Returns a clone of this instance's concept of "now" (never null). - * * If setNow was never called (or if null was specified) then this method * first defines 'now' as the value dictated by the SolrRequestInfo if it * exists -- otherwise it uses a new Date instance at the moment getNow() * is first called. * + * @return "now". * @see #setNow * @see SolrRequestInfo#getNOW */ @@ -334,9 +341,12 @@ public class DateMathParser { } /** - * Parses a string of commands relative "now" are returns the resulting Date. + * Parses a date expression relative to "now". * - * @throws ParseException positions in ParseExceptions are token positions, not character positions. + * @param math a date expression such as "+24MONTHS". + * @return the result of applying the expression to the current time. + * @throws ParseException positions in ParseExceptions are token positions, + * not character positions. */ public Date parseMath(String math) throws ParseException { /* check for No-Op */ @@ -344,6 +354,8 @@ public class DateMathParser { return getNow(); } + LOG.debug("parsing {}", math); + ZoneId zoneId = zone.toZoneId(); // localDateTime is a date and time local to the timezone specified LocalDateTime localDateTime = ZonedDateTime.ofInstant(getNow().toInstant(), zoneId).toLocalDateTime(); @@ -394,11 +406,44 @@ public class DateMathParser { } } + LOG.debug("returning {}", localDateTime); return Date.from(ZonedDateTime.of(localDateTime, zoneId).toInstant()); } private static Pattern splitter = Pattern.compile("\\b|(?<=\\d)(?=\\D)"); + /** + * For manual testing. With one argument, test one-argument parseMath. + * With two (or more) arguments, test two-argument parseMath. + * + * @param argv date math expressions. + * @throws java.lang.Exception passed through. 
+ */ + public static void main(String[] argv) + throws Exception { + DateMathParser parser = new DateMathParser(); + try { + Date parsed; + + if (argv.length <= 0) { + System.err.println("Date math expression(s) expected."); + } + + if (argv.length > 0) { + parsed = parser.parseMath(argv[0]); + System.out.format("Applied %s to implicit current time: %s%n", + argv[0], parsed.toString()); + } + + if (argv.length > 1) { + parsed = DateMathParser.parseMath(new Date(), argv[1]); + System.out.format("Applied %s to explicit current time: %s%n", + argv[1], parsed.toString()); + } + } catch (ParseException ex) { + System.err.format("Oops: %s%n", ex.getMessage()); + } + } } diff --git a/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java new file mode 100644 index 0000000000..87d354a7f6 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/util/TimeHelpers.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import java.util.Date; +import java.util.GregorianCalendar; +import java.util.TimeZone; + +/** + * Various manipulations of dates and times. + * + * @author mwood + */ +public class TimeHelpers { + private static final TimeZone UTC = TimeZone.getTimeZone("UTC"); + + /** + * Never instantiate this class. + */ + private TimeHelpers() {} + + /** + * Set a Date's time to midnight UTC. + * + * @param from some date-time. + * @return midnight UTC of the supplied date-time. + */ + public static Date toMidnightUTC(Date from) { + GregorianCalendar calendar = new GregorianCalendar(UTC); + calendar.setTime(from); + calendar.set(GregorianCalendar.HOUR_OF_DAY, 0); + calendar.set(GregorianCalendar.MINUTE, 0); + calendar.set(GregorianCalendar.SECOND, 0); + calendar.set(GregorianCalendar.MILLISECOND, 0); + return calendar.getTime(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java index f46f596dba..c0eebcab10 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SubmissionAccessOptionConverter.java @@ -6,7 +6,9 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.converter; + import java.text.ParseException; +import java.util.Date; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.model.AccessConditionOptionRest; @@ -15,6 +17,7 @@ import org.dspace.app.rest.projection.Projection; import org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionOption; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.stereotype.Component; /** @@ -27,21 +30,21 @@ import org.springframework.stereotype.Component; public class SubmissionAccessOptionConverter implements DSpaceConverter { - DateMathParser dateMathParser = new DateMathParser(); - @Override public SubmissionAccessOptionRest convert(AccessConditionConfiguration config, Projection projection) { SubmissionAccessOptionRest model = new SubmissionAccessOptionRest(); model.setId(config.getName()); 
model.setCanChangeDiscoverable(config.getCanChangeDiscoverable()); model.setProjection(projection); + DateMathParser dateMathParser = new DateMathParser(); for (AccessConditionOption option : config.getOptions()) { AccessConditionOptionRest optionRest = new AccessConditionOptionRest(); optionRest.setHasStartDate(option.getHasStartDate()); optionRest.setHasEndDate(option.getHasEndDate()); if (StringUtils.isNotBlank(option.getStartDateLimit())) { try { - optionRest.setMaxStartDate(dateMathParser.parseMath(option.getStartDateLimit())); + Date requested = dateMathParser.parseMath(option.getStartDateLimit()); + optionRest.setMaxStartDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong start date limit configuration for the access condition " + "option named " + option.getName()); @@ -49,7 +52,8 @@ public class SubmissionAccessOptionConverter } if (StringUtils.isNotBlank(option.getEndDateLimit())) { try { - optionRest.setMaxEndDate(dateMathParser.parseMath(option.getEndDateLimit())); + Date requested = dateMathParser.parseMath(option.getEndDateLimit()); + optionRest.setMaxEndDate(TimeHelpers.toMidnightUTC(requested)); } catch (ParseException e) { throw new IllegalStateException("Wrong end date limit configuration for the access condition " + "option named " + option.getName()); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java index ffefd1daac..24b6b96961 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/SubmissionUploadRestRepository.java @@ -10,6 +10,7 @@ package org.dspace.app.rest.repository; import java.text.ParseException; import java.util.ArrayList; import java.util.Collection; +import java.util.Date; import java.util.List; import org.apache.commons.lang3.StringUtils; @@ -18,11 +19,11 @@ import org.dspace.app.rest.model.AccessConditionOptionRest; import org.dspace.app.rest.model.SubmissionUploadRest; import org.dspace.app.rest.projection.Projection; import org.dspace.core.Context; -import org.dspace.eperson.service.GroupService; import org.dspace.submit.model.AccessConditionOption; import org.dspace.submit.model.UploadConfiguration; import org.dspace.submit.model.UploadConfigurationService; import org.dspace.util.DateMathParser; +import org.dspace.util.TimeHelpers; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -47,11 +48,6 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository uploadConfigs = uploadConfigurationService.getMap().values(); Projection projection = utils.obtainProjection(); List results = new ArrayList<>(); - List configNames = new ArrayList(); + List configNames = new ArrayList<>(); for (UploadConfiguration uploadConfig : uploadConfigs) { if (!configNames.contains(uploadConfig.getName())) { configNames.add(uploadConfig.getName()); @@ -92,13 +88,15 @@ public class SubmissionUploadRestRepository extends DSpaceRestRepository { @@ -52,6 +54,12 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation accessConditions = parseAccessConditions(path, value, absolutePath); + // Clamp access condition dates to midnight UTC + for (AccessConditionDTO condition : accessConditions) 
{ + condition.setStartDate(TimeHelpers.toMidnightUTC(condition.getStartDate())); + condition.setEndDate(TimeHelpers.toMidnightUTC(condition.getEndDate())); + } + verifyAccessConditions(context, configuration, accessConditions); if (absolutePath.length == 1) { @@ -65,7 +73,7 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation parseAccessConditions(String path, Object value, String[] split) { - List accessConditions = new ArrayList(); + List accessConditions = new ArrayList<>(); if (split.length == 1) { accessConditions = evaluateArrayObject((LateObjectEvaluator) value); } else if (split.length == 2) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java index 0216628a6b..d2529cbca3 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionReplacePatchOperation.java @@ -6,6 +6,7 @@ * http://www.dspace.org/license/ */ package org.dspace.app.rest.submit.factory.impl; + import java.sql.SQLException; import java.text.ParseException; import java.text.SimpleDateFormat; @@ -29,6 +30,7 @@ import org.dspace.core.Context; import org.dspace.submit.model.AccessConditionConfiguration; import org.dspace.submit.model.AccessConditionConfigurationService; import org.dspace.submit.model.AccessConditionOption; +import org.dspace.util.TimeHelpers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -106,7 +108,7 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< return null; } - private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplare) + private AccessConditionDTO createDTO(ResourcePolicy rpToReplace, String attributeReplace, String valueToReplace) throws ParseException { AccessConditionDTO accessCondition = new AccessConditionDTO(); accessCondition.setName(rpToReplace.getRpName()); @@ -114,13 +116,13 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< accessCondition.setEndDate(rpToReplace.getEndDate()); switch (attributeReplace) { case "name": - accessCondition.setName(valueToReplare); + accessCondition.setName(valueToReplace); return accessCondition; case "startDate": - accessCondition.setStartDate(parseDate(valueToReplare)); + accessCondition.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; case "endDate": - accessCondition.setEndDate(parseDate(valueToReplare)); + accessCondition.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); return accessCondition; default: throw new UnprocessableEntityException("The provided attribute: " @@ -128,17 +130,17 @@ public class AccessConditionReplacePatchOperation extends ReplacePatchOperation< } } - private void updatePolicy(Context context, String valueToReplare, String attributeReplace, + private void updatePolicy(Context context, String valueToReplace, String attributeReplace, ResourcePolicy rpToReplace) throws SQLException, AuthorizeException { switch (attributeReplace) { case "name": - rpToReplace.setRpName(valueToReplare); + rpToReplace.setRpName(valueToReplace); break; case "startDate": - rpToReplace.setStartDate(parseDate(valueToReplare)); + 
rpToReplace.setStartDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; case "endDate": - rpToReplace.setEndDate(parseDate(valueToReplare)); + rpToReplace.setEndDate(TimeHelpers.toMidnightUTC(parseDate(valueToReplace))); break; default: throw new IllegalArgumentException("Attribute to replace is not valid:" + attributeReplace); From ecb1f1747cdb90ec6718e2c7c6ff32f4f3f7de91 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Mon, 6 Mar 2023 16:53:26 -0500 Subject: [PATCH 013/125] Guard against null dates. How embarassing. --- .../impl/AccessConditionAddPatchOperation.java | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java index 39ce35425e..750c50524d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/submit/factory/impl/AccessConditionAddPatchOperation.java @@ -10,6 +10,7 @@ package org.dspace.app.rest.submit.factory.impl; import java.sql.SQLException; import java.text.ParseException; import java.util.ArrayList; +import java.util.Date; import java.util.List; import javax.servlet.http.HttpServletRequest; @@ -56,8 +57,14 @@ public class AccessConditionAddPatchOperation extends AddPatchOperation Date: Wed, 29 Mar 2023 17:30:59 +0100 Subject: [PATCH 014/125] Fix for #8732 and #8750 --- .../VersionedDOIIdentifierProvider.java | 18 +++++++++--------- .../repository/IdentifierRestRepository.java | 3 ++- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index e7c786d5f8..a246906b54 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -66,7 +66,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { try { history = versionHistoryService.findByItem(context, item); } catch (SQLException ex) { - throw new RuntimeException("A problem occured while accessing the database.", ex); + throw new RuntimeException("A problem occurred while accessing the database.", ex); } String doi = null; @@ -76,7 +76,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { return doi; } } catch (SQLException ex) { - log.error("Error while attemping to retrieve information about a DOI for " + log.error("Error while attempting to retrieve information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) + " with ID " + dso.getID() + ".", ex); throw new RuntimeException("Error while attempting to retrieve " @@ -126,7 +126,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex); throw new RuntimeException("A problem with the database connection occured.", ex); } - return versionedDOI; + return DOI.SCHEME + versionedDOI; } } @@ -134,7 +134,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { if (history != null) { // versioning is currently supported for items only // if we have a history, we 
have a item - doi = makeIdentifierBasedOnHistory(context, dso, history); + doi = makeIdentifierBasedOnHistory(context, dso, history, filter); } else { doi = loadOrCreateDOI(context, dso, null, filter).getDoi(); } @@ -145,7 +145,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return doi; + return DOI.SCHEME + doi; } @Override @@ -162,7 +162,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { Item item = (Item) dso; if (StringUtils.isEmpty(identifier)) { - identifier = mint(context, dso); + identifier = mint(context, dso, filter); } String doiIdentifier = doiService.formatIdentifier(identifier); @@ -170,10 +170,10 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { // search DOI in our db try { - doi = loadOrCreateDOI(context, dso, doiIdentifier); + doi = loadOrCreateDOI(context, dso, doiIdentifier, filter); } catch (SQLException ex) { - log.error("Error in databse connection: " + ex.getMessage(), ex); - throw new RuntimeException("Error in database conncetion.", ex); + log.error("Error in database connection: " + ex.getMessage(), ex); + throw new RuntimeException("Error in database connection.", ex); } if (DELETED.equals(doi.getStatus()) || diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java index 1be569d18e..b4a9688942 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java @@ -234,7 +234,8 @@ public class IdentifierRestRepository extends DSpaceRestRepository Date: Wed, 29 Mar 2023 20:44:34 +0100 Subject: [PATCH 015/125] Update VersionedDOIIdentifierProvider.java getBareDOI method already adds doi: SCHEME --- .../org/dspace/identifier/VersionedDOIIdentifierProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index a246906b54..7151f6a492 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -126,7 +126,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex); throw new RuntimeException("A problem with the database connection occured.", ex); } - return DOI.SCHEME + versionedDOI; + return versionedDOI; } } From ca66fc7309484169b9442716a8887981c870767d Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Wed, 29 Mar 2023 21:05:07 +0100 Subject: [PATCH 016/125] Update VersionedDOIIdentifierProvider.java Include check for DOI.SCHEME before returning DOI --- .../org/dspace/identifier/VersionedDOIIdentifierProvider.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 7151f6a492..a933e85d30 100644 --- 
a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -145,7 +145,7 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { log.error("AuthorizationException while creating a new DOI: ", ex); throw new IdentifierException(ex); } - return DOI.SCHEME + doi; + return doi.startsWith(DOI.SCHEME) ? doi : DOI.SCHEME + doi; } @Override From a11ed8a0d3f778f8e937512e726d71e28b577349 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Thu, 30 Mar 2023 15:38:09 +0200 Subject: [PATCH 017/125] 100553: Sort the queried metadata fields ASC to always display exact matches on top (this can otherwise lead to angular errors) --- .../discovery/indexobject/MetadataFieldIndexFactoryImpl.java | 1 + .../dspace/app/rest/repository/MetadataFieldRestRepository.java | 1 + 2 files changed, 2 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java index 518a8ff145..bef44326fe 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/MetadataFieldIndexFactoryImpl.java @@ -64,6 +64,7 @@ public class MetadataFieldIndexFactoryImpl extends IndexFactoryImpl Date: Thu, 30 Mar 2023 15:42:18 +0200 Subject: [PATCH 018/125] 100553: Fixed the pagination for core/metadatafield/byFieldName rest endpoint --- .../rest/repository/MetadataFieldRestRepository.java | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index b5d12f1d45..65e50005b5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -135,13 +135,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository matchingMetadataFields = new ArrayList<>(); if (StringUtils.isBlank(exactName)) { // Find matches in Solr Search core DiscoverQuery discoverQuery = - this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query); + this.createDiscoverQuery(context, schemaName, elementName, qualifierName, query, pageable); try { DiscoverResult searchResult = searchService.search(context, null, discoverQuery); for (IndexableObject object : searchResult.getIndexableObjects()) { @@ -149,6 +150,7 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository filterQueries = new ArrayList<>(); if (StringUtils.isNotBlank(query)) { if (query.split("\\.").length > 3) { @@ -211,6 +214,8 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository Date: Thu, 30 Mar 2023 17:26:29 +0200 Subject: [PATCH 019/125] 100553: Added backend validation on schema, element and qualifier to check if they contain dots --- .../app/rest/repository/MetadataFieldRestRepository.java | 4 ++++ .../app/rest/repository/MetadataSchemaRestRepository.java | 2 ++ 2 files changed, 6 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 65e50005b5..0396a8ad67 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -253,10 +253,14 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository Date: Mon, 3 Apr 2023 13:05:50 +0200 Subject: [PATCH 020/125] 100553: Removed possibility to updated schema name, element and qualifier --- .../MetadataFieldRestRepository.java | 20 +++-- .../MetadataSchemaRestRepository.java | 15 ++-- .../rest/MetadataSchemaRestRepositoryIT.java | 30 ++++++- .../rest/MetadatafieldRestRepositoryIT.java | 86 +++++++++++++++---- 4 files changed, 114 insertions(+), 37 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index 0396a8ad67..c185e83342 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -313,21 +313,23 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository Date: Wed, 5 Apr 2023 13:20:24 +0200 Subject: [PATCH 021/125] 97248: Implement feedback --- .../org/dspace/discovery/SearchUtils.java | 22 +- .../discovery/SolrServiceFileInfoPlugin.java | 8 +- .../DiscoveryConfigurationService.java | 75 +- .../InprogressSubmissionIndexFactoryImpl.java | 2 +- .../indexobject/ItemIndexFactoryImpl.java | 2 +- .../repository/DiscoveryRestRepository.java | 10 +- .../config/spring/api/discovery.xml | 3067 ----------------- .../DiscoveryScopeBasedRestControllerIT.java | 22 +- .../app/rest/matcher/FacetEntryMatcher.java | 2 +- 9 files changed, 95 insertions(+), 3115 deletions(-) delete mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml diff --git a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java index 418720be4a..4a53e34454 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SearchUtils.java @@ -62,12 +62,24 @@ public class SearchUtils { return searchService; } + /** + * Retrieves the Discovery Configuration for a null context, prefix and DSpace object. + * This will result in returning the default configuration + * @return the default configuration + */ public static DiscoveryConfiguration getDiscoveryConfiguration() { return getDiscoveryConfiguration(null, null, null); } - public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, - DSpaceObject dso) { + /** + * Retrieves the Discovery Configuration with a null prefix for a DSpace object. 
+ * @param context + * the dabase context + * @param dso + * the DSpace object + * @return the Discovery Configuration for the specified DSpace object + */ + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, DSpaceObject dso) { return getDiscoveryConfiguration(context, null, dso); } @@ -84,7 +96,7 @@ public class SearchUtils { * the DSpaceObject * @return the discovery configuration for the specified scope */ - public static DiscoveryConfiguration getDiscoveryConfiguration(final Context context, String prefix, + public static DiscoveryConfiguration getDiscoveryConfiguration(Context context, String prefix, DSpaceObject dso) { if (prefix != null) { return getDiscoveryConfigurationByName(dso != null ? prefix + "." + dso.getHandle() : prefix); @@ -161,12 +173,12 @@ public class SearchUtils { * Method that retrieves a list of all the configuration objects from the given item * A configuration object can be returned for each parent community/collection * - * @param item the DSpace item * @param context the database context + * @param item the DSpace item * @return a list of configuration objects * @throws SQLException An exception that provides information on a database access error or other errors. */ - public static List getAllDiscoveryConfigurations(Item item, Context context) + public static List getAllDiscoveryConfigurations(Context context, Item item) throws SQLException { List collections = item.getCollections(); return getAllDiscoveryConfigurations(context, null, collections, item); diff --git a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java index 6bda2fc52d..7aece5acf3 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java +++ b/dspace-api/src/main/java/org/dspace/discovery/SolrServiceFileInfoPlugin.java @@ -53,16 +53,20 @@ public class SolrServiceFileInfoPlugin implements SolrServiceIndexPlugin { if (bitstreams != null) { for (Bitstream bitstream : bitstreams) { document.addField(SOLR_FIELD_NAME_FOR_FILENAMES, bitstream.getName()); + // Add _keyword and _filter fields which are necessary to support filtering and faceting + // for the file names document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_keyword", bitstream.getName()); document.addField(SOLR_FIELD_NAME_FOR_FILENAMES + "_filter", bitstream.getName()); String description = bitstream.getDescription(); if ((description != null) && !description.isEmpty()) { document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS, description); + // Add _keyword and _filter fields which are necessary to support filtering and + // faceting for the descriptions document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_keyword", - bitstream.getName()); + description); document.addField(SOLR_FIELD_NAME_FOR_DESCRIPTIONS + "_filter", - bitstream.getName()); + description); } } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 7d5b435555..da23b87a35 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -17,6 +17,8 @@ import java.util.UUID; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import 
org.dspace.content.Collection; +import org.dspace.content.Community; import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.DSpaceObjectService; @@ -36,10 +38,11 @@ public class DiscoveryConfigurationService { private Map> toIgnoreMetadataFields = new HashMap<>(); /** - * Discovery configurations, cached by DSO UUID. When a DSO doesn't have its own configuration, we take the one of - * the first parent that does. This cache ensures we don't have to go up the hierarchy every time. + * Discovery configurations, cached by Community/Collection UUID. When a Community or Collection does not have its + * own configuration, we take the one of the first parent that does. + * This cache ensures we do not have to go up the hierarchy every time. */ - private final Map uuidMap = new HashMap<>(); + private final Map comColToDiscoveryConfigurationMap = new HashMap<>(); public Map getMap() { return map; @@ -57,15 +60,26 @@ public class DiscoveryConfigurationService { this.toIgnoreMetadataFields = toIgnoreMetadataFields; } - public DiscoveryConfiguration getDiscoveryConfiguration(final Context context, - IndexableObject dso) { + /** + * Retrieve the discovery configuration for the provided IndexableObject. When a DSpace Object can be retrieved from + * the IndexableObject, the discovery configuration will be returned for the DSpace Object. Otherwise, a check will + * be done to look for the unique index ID of the IndexableObject. When the IndexableObject is null, the default + * configuration will be retrieved + * + * When no direct match is found, the parent object will + * be checked until there is no parent left, in which case the "default" configuration will be returned. + * @param context - The database context + * @param indexableObject - The IndexableObject to retrieve the configuration for + * @return the discovery configuration for the provided IndexableObject. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(Context context, IndexableObject indexableObject) { String name; - if (dso == null) { - name = "default"; - } else if (dso instanceof IndexableDSpaceObject) { - return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) dso).getIndexedObject()); + if (indexableObject == null) { + return getDiscoveryConfiguration(null); + } else if (indexableObject instanceof IndexableDSpaceObject) { + return getDiscoveryDSOConfiguration(context, ((IndexableDSpaceObject) indexableObject).getIndexedObject()); } else { - name = dso.getUniqueIndexID(); + name = indexableObject.getUniqueIndexID(); } return getDiscoveryConfiguration(name); } @@ -77,16 +91,15 @@ public class DiscoveryConfigurationService { * @param dso - The DSpace object to retrieve the configuration for * @return the discovery configuration for the provided DSO. 
*/ - public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, - DSpaceObject dso) { + public DiscoveryConfiguration getDiscoveryDSOConfiguration(final Context context, DSpaceObject dso) { // Fall back to default configuration if (dso == null) { - return getDiscoveryConfiguration("default", false); + return getDiscoveryConfiguration(null, true); } // Attempt to retrieve cached configuration by UUID - if (uuidMap.containsKey(dso.getID())) { - return uuidMap.get(dso.getID()); + if (comColToDiscoveryConfigurationMap.containsKey(dso.getID())) { + return comColToDiscoveryConfigurationMap.get(dso.getID()); } DiscoveryConfiguration configuration; @@ -107,13 +120,21 @@ public class DiscoveryConfigurationService { configuration = getDiscoveryDSOConfiguration(context, parentObject); } - // Cache the resulting configuration - uuidMap.put(dso.getID(), configuration); + // Cache the resulting configuration when the DSO is a Community or Collection + if (dso instanceof Community || dso instanceof Collection) { + comColToDiscoveryConfigurationMap.put(dso.getID(), configuration); + } return configuration; } - public DiscoveryConfiguration getDiscoveryConfiguration(final String name) { + /** + * Retrieve the Discovery Configuration for the provided name. When no configuration can be found for the name, the + * default configuration will be returned. + * @param name - The name of the configuration to be retrieved + * @return the Discovery Configuration for the provided name, or default when none was found. + */ + public DiscoveryConfiguration getDiscoveryConfiguration(String name) { return getDiscoveryConfiguration(name, true); } @@ -138,13 +159,23 @@ public class DiscoveryConfigurationService { return result; } - public DiscoveryConfiguration getDiscoveryConfigurationByNameOrDso(final Context context, - final String configurationName, - final IndexableObject dso) { + /** + * Retrieve the Discovery configuration for the provided name or IndexableObject. The configuration will first be + * checked for the provided name. 
When no match is found for the name, the configuration will be retrieved for the + * IndexableObject + * + * @param context - The database context + * @param configurationName - The name of the configuration to be retrieved + * @param indexableObject - The indexable object to retrieve the configuration for + * @return the Discovery configuration for the provided name, or when not found for the provided IndexableObject + */ + public DiscoveryConfiguration getDiscoveryConfigurationByNameOrIndexableObject(Context context, + String configurationName, + IndexableObject indexableObject) { if (StringUtils.isNotBlank(configurationName) && getMap().containsKey(configurationName)) { return getMap().get(configurationName); } else { - return getDiscoveryConfiguration(context, dso); + return getDiscoveryConfiguration(context, indexableObject); } } diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java index ebedfc34b7..04c5e7d432 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/InprogressSubmissionIndexFactoryImpl.java @@ -79,7 +79,7 @@ public abstract class InprogressSubmissionIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, (WorkspaceItem) inProgressSubmission); } else { - discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); + discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); } indexableItemService.addDiscoveryFields(doc, context, item, discoveryConfigurations); indexableCollectionService.storeCommunityCollectionLocations(doc, locations); diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index b417237f76..412442fb1c 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -147,7 +147,7 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(item, context); + List discoveryConfigurations = SearchUtils.getAllDiscoveryConfigurations(context, item); addDiscoveryFields(doc, context, indexableItem.getIndexedObject(), discoveryConfigurations); //mandatory facet to show status on mydspace diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java index e337e76ef2..46c8ab3e39 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DiscoveryRestRepository.java @@ -84,7 +84,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return 
discoverConfigurationConverter.convert(discoveryConfiguration, utils.obtainProjection()); } @@ -96,7 +96,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Context context = obtainContext(); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; @@ -121,7 +121,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); return discoverFacetConfigurationConverter.convert(configuration, dsoScope, discoveryConfiguration); } @@ -138,7 +138,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverQuery discoverQuery = queryBuilder.buildFacetQuery(context, scopeObject, discoveryConfiguration, prefix, query, searchFilters, dsoTypes, page, facetName); @@ -157,7 +157,7 @@ public class DiscoveryRestRepository extends AbstractDSpaceRestRepository { Pageable page = PageRequest.of(1, 1); IndexableObject scopeObject = scopeResolver.resolveScope(context, dsoScope); DiscoveryConfiguration discoveryConfiguration = searchConfigurationService - .getDiscoveryConfigurationByNameOrDso(context, configuration, scopeObject); + .getDiscoveryConfigurationByNameOrIndexableObject(context, configuration, scopeObject); DiscoverResult searchResult = null; DiscoverQuery discoverQuery = null; diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml deleted file mode 100644 index e029c65aa0..0000000000 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/discovery.xml +++ /dev/null @@ -1,3067 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - dc.rights - - - - - - - - - - - - - - - dc.rights - - - - - - - - dc.description.provenance - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - dc.title - dc.contributor.author - dc.creator - dc.subject - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - search.resourcetype:Item - - withdrawn:true OR discoverable:false - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - dc.title - dc.contributor.author - dc.creator - 
-            [… remainder of the deleted test discovery.xml: further per-scope discovery configurations repeating the default search filters (dc.title, dc.contributor.author, dc.creator, dc.subject) and search.resourcetype filter queries, followed by sidebar facet and index-field definitions for dc.title, the author fields, dspace.entity.type, dc.subject.*, dc.date.issued, dc.type, dc.identifier, the person.*, organization.*, creativework.*, publicationissue.issueNumber and publicationVolume.volumeNumber fields, and the relation.* link fields …]
- - - - - relation.isIssueOfJournalVolume - - - - - - - - - - - relation.isJournalOfVolume - - - - - - - - - - - creativework.publisher - - - - - - - - - - - - - - - creativework.editor - - - - - - - - - - - - - - - relation.isVolumeOfJournal - - - - - - - - - - - - - - placeholder.placeholder.placeholder - - - - - - - - - - relation.isOrgUnitOfProject - - - - - - - - - - - - relation.isPersonOfProject - - - - - - - - - - - - relation.isPublicationOfProject - - - - - - - - - - - relation.isContributorOfPublication - - - - - - - - - - - relation.isPublicationOfContributor - - - - - - - - - - - relation.isFundingAgencyOfProject - - - - - - - - - - - relation.isProjectOfFundingAgency - - - - - - - - - - - dc.test.parentcommunity1field - - - - - - - - - - - - - - - dc.test.subcommunity11field - - - - - - - - - - - - - - - dc.test.collection111field - - - - - - - - - - - - - - - dc.test.collection121field - - - - - - - - - - - - - - - dc.test.subcommunity21field - - - - - - - - - - - - - - dc.test.collection211field - - - - - - - - - - - - - - dc.test.collection221field - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index 15c1019584..0c8735545e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -272,7 +272,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -310,7 +310,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity11field") @@ -339,7 +339,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection111field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection111field", "text"))) ); getClient().perform(get("/api/discover/facets/collection111field") @@ -366,7 +366,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity11field", "text", false))) + 
FacetEntryMatcher.matchFacet(false, "subcommunity11field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity11field") @@ -391,7 +391,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -420,7 +420,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection121field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection121field", "text"))) ); getClient().perform(get("/api/discover/facets/collection121field") @@ -445,7 +445,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("parentcommunity1field", "text", false))) + FacetEntryMatcher.matchFacet(false, "parentcommunity1field", "text"))) ); getClient().perform(get("/api/discover/facets/parentcommunity1field") @@ -490,7 +490,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity21field") @@ -519,7 +519,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection211field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection211field", "text"))) ); getClient().perform(get("/api/discover/facets/collection211field") @@ -546,7 +546,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("subcommunity21field", "text", false))) + FacetEntryMatcher.matchFacet(false, "subcommunity21field", "text"))) ); getClient().perform(get("/api/discover/facets/subcommunity21field") @@ -588,7 +588,7 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg .andExpect(jsonPath("$._links.self.href", containsString("api/discover/facets"))) .andExpect(jsonPath("$._embedded.facets", containsInAnyOrder( FacetEntryMatcher.authorFacet(false), - FacetEntryMatcher.matchFacet("collection221field", "text", false))) + FacetEntryMatcher.matchFacet(false, "collection221field", "text"))) ); 
getClient().perform(get("/api/discover/facets/collection221field") diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java index 34b7b8b30d..60b5f417ed 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/FacetEntryMatcher.java @@ -99,7 +99,7 @@ public class FacetEntryMatcher { ); } - public static Matcher matchFacet(String name, String facetType, boolean hasNext) { + public static Matcher matchFacet(boolean hasNext, String name, String facetType) { return allOf( hasJsonPath("$.name", is(name)), hasJsonPath("$.facetType", is(facetType)), From e433720cd005ddf6b6e13ce09988c72500a74115 Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Wed, 5 Apr 2023 14:07:52 +0200 Subject: [PATCH 022/125] Add test-discovery xml --- .../config/spring/api/test-discovery.xml | 1115 +++++++++++++++++ 1 file changed, 1115 insertions(+) create mode 100644 dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml new file mode 100644 index 0000000000..8b11a87e2d --- /dev/null +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -0,0 +1,1115 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.rights + + + + + + + + + + + + + + + dc.rights + + + + + + + + dc.description.provenance + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + 
dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + search.resourcetype:Item OR search.resourcetype:Collection OR search.resourcetype:Community + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.title + dc.contributor.author + dc.creator + dc.subject + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + dc.test.parentcommunity1field + + + + + + + + + + + + + + + dc.test.subcommunity11field + + + + + + + + + + + + + + + dc.test.collection111field + + + + + + + + + + + + + + + dc.test.collection121field + + + + + + + + + + + + + + + dc.test.subcommunity21field + + + + + + + + + + + + + + dc.test.collection211field + + + + + + + + + + + + + + dc.test.collection221field + + + + + + + + + + + From cf831ed7d5892fb86b1569b8c02c406e546e2e8e Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Fri, 7 Apr 2023 16:26:12 +0200 Subject: [PATCH 023/125] Fix merge issues --- dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java | 2 +- .../data/dspaceFolder/config/spring/api/test-discovery.xml | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java index e02367f6eb..f99aab852b 100644 --- a/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java +++ b/dspace-api/src/main/java/org/dspace/browse/SolrBrowseDAO.java @@ -239,7 +239,7 @@ public class SolrBrowseDAO implements BrowseDAO { } private void addDefaultFilterQueries(DiscoverQuery query) { - DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(container); + DiscoveryConfiguration discoveryConfiguration = SearchUtils.getDiscoveryConfiguration(context, container); discoveryConfiguration.getDefaultFilterQueries().forEach(query::addFilterQueries); } diff --git a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml index 8b11a87e2d..4a91ef051e 100644 --- a/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml +++ b/dspace-server-webapp/src/test/data/dspaceFolder/config/spring/api/test-discovery.xml @@ -39,10 +39,13 @@ + + + From b48ea37de5d2b87d550099c41526cdbbf2969831 Mon Sep 17 00:00:00 2001 From: Tuan Nguyen Date: Mon, 17 Apr 2023 15:16:17 -0400 Subject: [PATCH 024/125] properly escape ':' in query --- .../java/org/dspace/content/authority/SolrAuthority.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java index 497fa08f2f..123626cd09 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/SolrAuthority.java @@ -200,8 +200,8 @@ public class SolrAuthority implements ChoiceAuthority { } private String toQuery(String searchField, String text) { - return searchField + 
":(" + text.toLowerCase().replaceAll(":", "\\:") + "*) or " + searchField + ":(" + text - .toLowerCase().replaceAll(":", "\\:") + ")"; + return searchField + ":(" + text.toLowerCase().replaceAll(":", "\\\\:") + "*) or " + searchField + ":(" + text + .toLowerCase().replaceAll(":", "\\\\:") + ")"; } @Override @@ -225,7 +225,7 @@ public class SolrAuthority implements ChoiceAuthority { log.debug("requesting label for key " + key + " using locale " + locale); } SolrQuery queryArgs = new SolrQuery(); - queryArgs.setQuery("id:" + key); + queryArgs.setQuery("id:" + key.replaceAll(":", "\\\\:")); queryArgs.setRows(1); QueryResponse searchResponse = getSearchService().search(queryArgs); SolrDocumentList docs = searchResponse.getResults(); From 583b38a2e6cbe9adf495423063dbe5b5ded2a707 Mon Sep 17 00:00:00 2001 From: Kristof De Langhe Date: Wed, 5 Apr 2023 17:48:55 +0200 Subject: [PATCH 025/125] 100414: Missing search_result statistics fix --- .../rest/converter/SearchEventConverter.java | 10 +++ .../app/rest/model/SearchEventRest.java | 9 +++ .../app/rest/utils/DSpaceObjectResolver.java | 61 +++++++++++++++++++ .../app/rest/SearchEventRestRepositoryIT.java | 55 +++++++++++++++++ 4 files changed, 135 insertions(+) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index 470a3ac342..a8203e272f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -14,6 +14,7 @@ import javax.servlet.http.HttpServletRequest; import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; +import org.dspace.app.rest.utils.DSpaceObjectResolver; import org.dspace.app.rest.utils.ScopeResolver; import org.dspace.content.DSpaceObject; import org.dspace.core.Context; @@ -29,11 +30,20 @@ public class SearchEventConverter { @Autowired private ScopeResolver scopeResolver; + @Autowired + private DSpaceObjectResolver dSpaceObjectResolver; + public UsageSearchEvent convert(Context context, HttpServletRequest request, SearchEventRest searchEventRest) { UsageSearchEvent usageSearchEvent = new UsageSearchEvent(UsageEvent.Action.SEARCH, request, context, null); usageSearchEvent.setQuery(searchEventRest.getQuery()); usageSearchEvent.setDsoType(searchEventRest.getDsoType()); + if (searchEventRest.getObject() != null) { + IndexableObject object = dSpaceObjectResolver.resolveObject(context, searchEventRest.getObject()); + if (object != null && object.getIndexedObject() instanceof DSpaceObject) { + usageSearchEvent.setObject((DSpaceObject) object.getIndexedObject()); + } + } if (searchEventRest.getScope() != null) { IndexableObject scopeObject = scopeResolver.resolveScope(context, String.valueOf(searchEventRest.getScope())); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index e029dbaf99..637acb9bfd 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,6 +25,7 @@ public class SearchEventRest extends 
BaseObjectRest { private UUID scope; private String configuration; private String dsoType; + private UUID object; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -97,4 +98,12 @@ public class SearchEventRest extends BaseObjectRest { public void setDsoType(String dsoType) { this.dsoType = dsoType; } + + public UUID getObject() { + return object; + } + + public void setObject(UUID object) { + this.object = object; + } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java new file mode 100644 index 0000000000..7ded06bdf5 --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java @@ -0,0 +1,61 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.utils; + +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.log4j.Logger; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.discovery.IndexableObject; +import org.dspace.discovery.indexobject.IndexableCollection; +import org.dspace.discovery.indexobject.IndexableCommunity; +import org.dspace.discovery.indexobject.IndexableItem; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; + +@Component +public class DSpaceObjectResolver { + /* Log4j logger */ + private static final Logger log = Logger.getLogger(DSpaceObjectResolver.class); + + @Autowired + ItemService itemService; + + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + + public IndexableObject resolveObject(Context context, UUID uuid) { + IndexableObject object = null; + if (uuid != null) { + try { + object = new IndexableCommunity(communityService.find(context, uuid)); + if (object.getIndexedObject() == null) { + object = new IndexableCollection(collectionService.find(context, uuid)); + } + if (object.getIndexedObject() == null) { + object = new IndexableItem(itemService.find(context, uuid)); + } + if (object.getIndexedObject() == null) { + throw new IllegalArgumentException("UUID " + uuid + " is expected to resolve to a Community, " + + "Collection or Item, but didn't resolve to any"); + } + } catch (SQLException e) { + log.warn("Unable to retrieve DSpace Object with ID " + uuid + " from the database", e); + } + } + return object; + } + +} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index bd40cfdc9d..63ca4b735d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -411,4 +411,59 @@ public class SearchEventRestRepositoryIT extends AbstractControllerIntegrationTe .andExpect(status().isCreated()); } + + @Test + public void postTestWithObjectSuccess() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. 
A community-collection structure with one parent community with sub-community and two collections. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(publicItem1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setObject(publicItem1.getID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isCreated()); + + } } From d9f7e04368f60b38a0cea43d7a8771121feaf346 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Thu, 20 Apr 2023 11:13:28 +0200 Subject: [PATCH 026/125] #8636 Request a Copy's "helpdesk" strategy requires authentication to respond --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 384f33deca..61b42fd185 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -175,9 +175,12 @@ public class RequestItemEmailNotifier { } } else { Bitstream bitstream = ri.getBitstream(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } email.send(); } else { From 98b843668e07ae2112b0ce7dbbd741af7afd3189 Mon Sep 17 00:00:00 2001 From: Sean Kalynuk Date: Thu, 20 Apr 2023 14:08:30 -0500 Subject: [PATCH 027/125] Fix #8490 preferred label lookup When the authority key still needs to be generated, skip the lookup for the preferred label since it does not exist in Solr yet. 
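Illustrative sketch only — this is not the literal hunk that follows, and the local variable and helper names are hypothetical; AuthorityValueService.GENERATE and the metadata authority accessor are assumed from the import added by this patch. The guard described in the commit message amounts to:

    String preferredLabel = null;
    String authorityKey = metadataValue.getAuthority();
    // Skip the Solr lookup while the key is still the "to be generated" placeholder:
    // no authority record exists yet for a key that has not been generated.
    if (StringUtils.isNotBlank(authorityKey)
            && !authorityKey.startsWith(AuthorityValueService.GENERATE)) {
        preferredLabel = resolvePreferredLabel(context, metadataValue); // hypothetical helper
    }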
--- .../dspace/discovery/indexobject/ItemIndexFactoryImpl.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java index 61827747b7..fc024cc524 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java +++ b/dspace-api/src/main/java/org/dspace/discovery/indexobject/ItemIndexFactoryImpl.java @@ -31,6 +31,7 @@ import org.apache.commons.lang3.time.DateFormatUtils; import org.apache.logging.log4j.Logger; import org.apache.solr.client.solrj.SolrServerException; import org.apache.solr.common.SolrInputDocument; +import org.dspace.authority.service.AuthorityValueService; import org.dspace.content.Collection; import org.dspace.content.Community; import org.dspace.content.Item; @@ -93,6 +94,8 @@ public class ItemIndexFactoryImpl extends DSpaceObjectIndexFactoryImpl Date: Mon, 6 Mar 2023 09:34:29 -0600 Subject: [PATCH 028/125] Update Spring & Spring Boot versions --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 3b6f7edb65..062544c91c 100644 --- a/pom.xml +++ b/pom.xml @@ -19,9 +19,9 @@ 11 - 5.3.20 - 2.6.8 - 5.6.5 + 5.3.25 + 2.7.9 + 5.7.7 5.6.5.Final 6.0.23.Final 42.4.3 From 42f548b17e282caeef614e6e87012ad20c4775f9 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 10 Mar 2023 14:40:07 -0600 Subject: [PATCH 029/125] Remove check for double-encoded param. It is no longer double-encoded after upgrading Spring Boot. --- .../java/org/dspace/app/rest/DiscoveryRestControllerIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index 8b01e7b377..a115c8aa2f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -6473,7 +6473,7 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest .andExpect(jsonPath("$.facetType", equalTo("authority"))) //There always needs to be a self link available .andExpect(jsonPath("$._links.self.href", - containsString("api/discover/facets/supervisedBy?prefix=group%2520B&configuration=supervision"))) + containsString("api/discover/facets/supervisedBy?prefix=group%20B&configuration=supervision"))) //This is how the page object must look like because it's the default with size 20 .andExpect(jsonPath("$.page", is(PageMatcher.pageEntry(0, 20)))) From 66d7c1bde6295798cea551f1f463fccb9011ea6d Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Fri, 24 Mar 2023 09:16:52 -0500 Subject: [PATCH 030/125] Update to latest spring-boot and spring --- pom.xml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pom.xml b/pom.xml index 062544c91c..1b4d34ccbf 100644 --- a/pom.xml +++ b/pom.xml @@ -19,8 +19,8 @@ 11 - 5.3.25 - 2.7.9 + 5.3.26 + 2.7.10 5.7.7 5.6.5.Final 6.0.23.Final From 8cd82e5d2e13b4fdca1fbc64634bbf825ced0708 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 25 Apr 2023 17:13:26 -0500 Subject: [PATCH 031/125] Update to Spring Boot v2.7.11 --- pom.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pom.xml b/pom.xml index 1b4d34ccbf..aec4b57fc1 100644 --- a/pom.xml +++ b/pom.xml @@ -19,9 +19,9 @@ 11 - 5.3.26 
- 2.7.10 - 5.7.7 + 5.3.27 + 2.7.11 + 5.7.8 5.6.5.Final 6.0.23.Final 42.4.3 From c919df81fb15868169a0a14d5be42079fe0c3908 Mon Sep 17 00:00:00 2001 From: Michael Plate Date: Fri, 28 Apr 2023 17:09:55 +0200 Subject: [PATCH 032/125] LDAP Auth extended for many groups --- .../authenticate/LDAPAuthentication.java | 90 ++++++++++++------- 1 file changed, 59 insertions(+), 31 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java index f3c6022e02..afd82db863 100644 --- a/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java +++ b/dspace-api/src/main/java/org/dspace/authenticate/LDAPAuthentication.java @@ -11,9 +11,11 @@ import static org.dspace.eperson.service.EPersonService.MD_PHONE; import java.io.IOException; import java.sql.SQLException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Hashtable; +import java.util.Iterator; import java.util.List; import javax.naming.NamingEnumeration; import javax.naming.NamingException; @@ -64,6 +66,7 @@ import org.dspace.services.factory.DSpaceServicesFactory; * @author Reuben Pasquini * @author Samuel Ottenhoff * @author Ivan Masár + * @author Michael Plate */ public class LDAPAuthentication implements AuthenticationMethod { @@ -391,7 +394,7 @@ public class LDAPAuthentication protected String ldapGivenName = null; protected String ldapSurname = null; protected String ldapPhone = null; - protected String ldapGroup = null; + protected ArrayList ldapGroup = null; /** * LDAP settings @@ -406,9 +409,9 @@ public class LDAPAuthentication final String ldap_surname_field; final String ldap_phone_field; final String ldap_group_field; - final boolean useTLS; + SpeakerToLDAP(Logger thelog) { ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); @@ -547,7 +550,11 @@ public class LDAPAuthentication if (attlist[4] != null) { att = atts.get(attlist[4]); if (att != null) { - ldapGroup = (String) att.get(); + // loop through all groups returned by LDAP + ldapGroup = new ArrayList(); + for (NamingEnumeration val = att.getAll(); val.hasMoreElements(); ) { + ldapGroup.add((String) val.next()); + } } } @@ -693,48 +700,69 @@ public class LDAPAuthentication /* * Add authenticated users to the group defined in dspace.cfg by * the authentication-ldap.login.groupmap.* key. + * + * @param dn + * The string containing distinguished name of the user + * + * @param group + * List of strings with LDAP dn of groups + * + * @param context + * DSpace context */ - private void assignGroups(String dn, String group, Context context) { + private void assignGroups(String dn, ArrayList group, Context context) { if (StringUtils.isNotBlank(dn)) { System.out.println("dn:" + dn); int i = 1; String groupMap = configurationService.getProperty("authentication-ldap.login.groupmap." 
+ i); - boolean cmp; + + // groupmap contains the mapping of LDAP groups to DSpace groups + // outer loop with the DSpace groups while (groupMap != null) { String t[] = groupMap.split(":"); String ldapSearchString = t[0]; String dspaceGroupName = t[1]; - if (group == null) { - cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); - } else { - cmp = StringUtils.equalsIgnoreCase(group, ldapSearchString); - } + // list of strings with dn from LDAP groups + // inner loop + Iterator groupIterator = group.iterator(); + while (groupIterator.hasNext()) { - if (cmp) { - // assign user to this group - try { - Group ldapGroup = groupService.findByName(context, dspaceGroupName); - if (ldapGroup != null) { - groupService.addMember(context, ldapGroup, context.getCurrentUser()); - groupService.update(context, ldapGroup); - } else { - // The group does not exist - log.warn(LogHelper.getHeader(context, - "ldap_assignGroupsBasedOnLdapDn", - "Group defined in authentication-ldap.login.groupmap." + i - + " does not exist :: " + dspaceGroupName)); + // save the current entry from iterator for further use + String currentGroup = groupIterator.next(); + + // very much the old code from DSpace <= 7.5 + if (currentGroup == null) { + cmp = StringUtils.containsIgnoreCase(dn, ldapSearchString + ","); + } else { + cmp = StringUtils.equalsIgnoreCase(currentGroup, ldapSearchString); + } + + if (cmp) { + // assign user to this group + try { + Group ldapGroup = groupService.findByName(context, dspaceGroupName); + if (ldapGroup != null) { + groupService.addMember(context, ldapGroup, context.getCurrentUser()); + groupService.update(context, ldapGroup); + } else { + // The group does not exist + log.warn(LogHelper.getHeader(context, + "ldap_assignGroupsBasedOnLdapDn", + "Group defined in authentication-ldap.login.groupmap." 
+ i + + " does not exist :: " + dspaceGroupName)); + } + } catch (AuthorizeException ae) { + log.debug(LogHelper.getHeader(context, + "assignGroupsBasedOnLdapDn could not authorize addition to " + + "group", + dspaceGroupName)); + } catch (SQLException e) { + log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", + dspaceGroupName)); } - } catch (AuthorizeException ae) { - log.debug(LogHelper.getHeader(context, - "assignGroupsBasedOnLdapDn could not authorize addition to " + - "group", - dspaceGroupName)); - } catch (SQLException e) { - log.debug(LogHelper.getHeader(context, "assignGroupsBasedOnLdapDn could not find group", - dspaceGroupName)); } } From 80af0665c12b3707351d45db962975657ebfe7c9 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Mon, 1 May 2023 15:15:28 -0500 Subject: [PATCH 033/125] send "forgot password" email if user tries to register with email address already in eperson table --- .../RegistrationRestRepository.java | 36 ++++++++++++------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java index ac44ccb4c2..c3c94a4d0b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RegistrationRestRepository.java @@ -138,21 +138,31 @@ public class RegistrationRestRepository extends DSpaceRestRepository Date: Tue, 2 May 2023 14:42:33 +0300 Subject: [PATCH 034/125] [DURACOM-126] solved Submission with a validation error on a hidden step can't be submitted --- .../dspace/app/util/SubmissionStepConfig.java | 29 +++++ .../dspaceFolder/config/item-submission.xml | 37 ++++++- .../dspaceFolder/config/submission-forms.xml | 29 +++++ .../converter/AInprogressItemConverter.java | 4 + .../SubmissionDefinitionsControllerIT.java | 100 +++++++++++------- .../app/rest/SubmissionFormsControllerIT.java | 66 ++++++++---- .../rest/WorkflowItemRestRepositoryIT.java | 31 ++++++ .../rest/WorkspaceItemRestRepositoryIT.java | 29 +++++ dspace/config/item-submission.xml | 1 - 9 files changed, 266 insertions(+), 60 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 5506b3c23f..1a8f2744b8 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -11,6 +11,9 @@ import java.io.Serializable; import java.util.Map; import org.apache.commons.lang3.BooleanUtils; +import org.dspace.content.InProgressSubmission; +import org.dspace.content.WorkspaceItem; +import org.hibernate.proxy.HibernateProxyHelper; /** * Class representing configuration for a single step within an Item Submission @@ -173,6 +176,32 @@ public class SubmissionStepConfig implements Serializable { return visibilityOutside; } + public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { + + String scopeToCheck = getScope(obj); + + if (scope == null || scopeToCheck == null) { + return false; + } + + String visibility = getVisibility(); + String visibilityOutside = getVisibilityOutside(); + + if (scope.equalsIgnoreCase(scopeToCheck)) { + return "hidden".equalsIgnoreCase(visibility); + } else { + return visibilityOutside == null || 
"hidden".equalsIgnoreCase(visibilityOutside); + } + + } + + private String getScope(InProgressSubmission obj) { + if (HibernateProxyHelper.getClassWithoutInitializingProxy(obj).equals(WorkspaceItem.class)) { + return "submission"; + } + return "workflow"; + } + /** * Get the number of this step in the current Submission process config. * Step numbers start with #0 (although step #0 is ALWAYS the special diff --git a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml index 6d8ae0c2f0..452460501a 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/item-submission.xml @@ -23,6 +23,7 @@ + @@ -54,7 +55,6 @@ org.dspace.app.rest.submit.step.CollectionStep collection - submission submit.progressbar.describe.stepone @@ -149,6 +149,34 @@ org.dspace.app.rest.submit.step.ShowIdentifiersStep identifiers + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + workflow + + + + submit.progressbar.describe.stepone + org.dspace.app.rest.submit.step.DescribeStep + submission-form + submission + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + + + + + org.dspace.app.rest.submit.step.CollectionStep + collection + submission + + @@ -222,6 +250,13 @@ + + + + + + + diff --git a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml index 7438fda852..6b7349616e 100644 --- a/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml +++ b/dspace-api/src/test/data/dspaceFolder/config/submission-forms.xml @@ -436,6 +436,35 @@ it, please enter the types and the actual numbers or codes. + +
+ + + dc + title + + false + + onebox + Field required + + +
+ +
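The test step definitions and forms being added in this hunk exercise the rule implemented by the new `SubmissionStepConfig.isHiddenForInProgressSubmission()` method above. As a reading aid, here is a minimal standalone restatement of that rule; the class, method and parameter names are illustrative only and are not part of the DSpace API:

```java
// Minimal standalone sketch (not DSpace API): restates the decision implemented by the
// new SubmissionStepConfig.isHiddenForInProgressSubmission() above, so the intent of the
// scope/visibility test configuration in this hunk is easier to follow.
public class StepVisibilitySketch {

    /**
     * @param stepScope         the step's configured scope, "submission" or "workflow" (may be null)
     * @param visibility        visibility of the step inside its own scope, e.g. "hidden"
     * @param visibilityOutside visibility of the step outside its scope, e.g. "hidden" (may be null)
     * @param currentPhase      "submission" for a WorkspaceItem, "workflow" for a workflow item
     */
    static boolean isHidden(String stepScope, String visibility,
                            String visibilityOutside, String currentPhase) {
        if (stepScope == null || currentPhase == null) {
            // a step without a scope is never hidden
            return false;
        }
        if (stepScope.equalsIgnoreCase(currentPhase)) {
            // inside its own scope the step is hidden only when explicitly marked hidden
            return "hidden".equalsIgnoreCase(visibility);
        }
        // outside its scope the step is hidden unless visibilityOutside says otherwise
        return visibilityOutside == null || "hidden".equalsIgnoreCase(visibilityOutside);
    }

    public static void main(String[] args) {
        // A workflow-scoped step with no visibilityOutside is hidden while the item is still
        // a workspace item, so a validation error on it must not block the submitter.
        System.out.println(isHidden("workflow", null, null, "submission")); // true
        System.out.println(isHidden("workflow", null, null, "workflow"));   // false
    }
}
```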
+ + + dc + type + + false + + onebox + Field required + + +
+ diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java index ce7ca34918..fa1d145011 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/AInprogressItemConverter.java @@ -86,6 +86,10 @@ public abstract class AInprogressItemConverter org.dspace.app.rest.submit.step.CollectionStep collection - submission diff --git a/dspace-swordv2/pom.xml b/dspace-swordv2/pom.xml index 35206d6ee2..9badeb2fe8 100644 --- a/dspace-swordv2/pom.xml +++ b/dspace-swordv2/pom.xml @@ -22,38 +22,6 @@ ${basedir}/..
- - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - postgres-support - - - !db.name - - - - - org.postgresql - postgresql - - - - - javax.servlet diff --git a/dspace/modules/additions/pom.xml b/dspace/modules/additions/pom.xml index 8f5a6f84f4..dd98bf0cbd 100644 --- a/dspace/modules/additions/pom.xml +++ b/dspace/modules/additions/pom.xml @@ -61,22 +61,6 @@ - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - unit-test-environment diff --git a/dspace/modules/rest/pom.xml b/dspace/modules/rest/pom.xml index a7c9b5922c..115393b7db 100644 --- a/dspace/modules/rest/pom.xml +++ b/dspace/modules/rest/pom.xml @@ -90,24 +90,6 @@ - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - - - diff --git a/dspace/modules/server/pom.xml b/dspace/modules/server/pom.xml index 9b696fa0cb..41ddb94be5 100644 --- a/dspace/modules/server/pom.xml +++ b/dspace/modules/server/pom.xml @@ -244,22 +244,6 @@ just adding new jar in the classloader - - - oracle-support - - - db.name - oracle - - - - - com.oracle - ojdbc6 - - - diff --git a/pom.xml b/pom.xml index 3b6f7edb65..fba97314a5 100644 --- a/pom.xml +++ b/pom.xml @@ -1617,11 +1617,6 @@ icu4j 62.1 - - com.oracle - ojdbc6 - 11.2.0.4.0 - org.dspace From 6fbf97a6b39502f440ac1b6be2094ee40871d7d7 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 2 May 2023 11:57:47 -0500 Subject: [PATCH 037/125] Remove oracle mentions from configs / READMEs --- .../storage/rdbms/sqlmigration/h2/README.md | 28 +++++++------------ .../rdbms/sqlmigration/postgres/README.md | 7 +++-- dspace/config/dspace.cfg | 18 +++--------- dspace/config/local.cfg.EXAMPLE | 18 +++--------- 4 files changed, 22 insertions(+), 49 deletions(-) diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md index 8088c6ccca..87e114ca53 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/h2/README.md @@ -4,33 +4,25 @@ in Production. Instead, DSpace uses the H2 Database to perform Unit Testing during development. -By default, the DSpace Unit Testing environment configures H2 to run in -"Oracle Mode" and initializes the H2 database using the scripts in this directory. +By default, the DSpace Unit Testing environment configures H2 to run in memory +and initializes the H2 database using the scripts in this directory. See +`[src]/dspace-api/src/test/data/dspaceFolder/config/local.cfg`. + These database migrations are automatically called by [Flyway](http://flywaydb.org/) -when the `DatabaseManager` initializes itself (see `initializeDatabase()` method). +in `DatabaseUtils`. -The H2 migrations in this directory are *based on* the Oracle Migrations, but -with some modifications in order to be valid in H2. +The H2 migrations in this directory all use H2's grammar/syntax. +For additional info see the [H2 SQL Grammar](https://www.h2database.com/html/grammar.html). -## Oracle vs H2 script differences - -One of the primary differences between the Oracle scripts and these H2 ones -is in the syntax of the `ALTER TABLE` command. Unfortunately, H2's syntax for -that command differs greatly from Oracle (and PostgreSQL as well). - -Most of the remainder of the scripts contain the exact Oracle syntax (which is -usually valid in H2). But, to you can always `diff` scripts of the same name -for further syntax differences. 
- -For additional info see the [H2 SQL Grammar](http://www.h2database.com/html/grammar.html). ## More Information on Flyway The SQL scripts in this directory are H2-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md index 72eb279912..e16e4c6d4c 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/postgres/README.md @@ -3,8 +3,9 @@ The SQL scripts in this directory are PostgreSQL-specific database migrations. They are used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using +`DatabaseUtils` initializes. + +During that process, Flyway determines which version of DSpace your database is using and then executes the appropriate upgrade script(s) to bring it up to the latest version. @@ -22,7 +23,7 @@ Please see the Flyway Documentation for more information: http://flywaydb.org/ The `update-sequences.sql` script in this directory may still be used to update your internal database counts if you feel they have gotten out of "sync". This may sometimes occur after large restores of content (e.g. when using the DSpace -[AIP Backup and Restore](https://wiki.duraspace.org/display/DSDOC5x/AIP+Backup+and+Restore) +[AIP Backup and Restore](https://wiki.lyrasis.org/display/DSDOC7x/AIP+Backup+and+Restore) feature). This `update-sequences.sql` script can be executed by running diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fa..aee08c8e60 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -74,23 +74,15 @@ solr.multicorePrefix = # solr.client.timeToLive = 600 ##### Database settings ##### -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. 
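Since PostgreSQL is now the only supported database, the connection values that this section keeps (db.url, db.driver, db.username, db.password, db.schema) can be sanity-checked outside DSpace with a plain JDBC probe. A minimal sketch, assuming the default values shown in this file and the PostgreSQL JDBC driver on the classpath; the class name is illustrative:

```java
// Minimal sketch, not part of DSpace: checks that the db.url / db.username / db.password
// values from dspace.cfg (the defaults shown in this file are assumed) point at a reachable
// PostgreSQL database, and prints what current_schema() reports, which should normally
// match db.schema ("public" by default).
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class DbConnectionCheck {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:postgresql://localhost:5432/dspace"; // db.url
        String user = "dspace";                                 // db.username
        String password = "dspace";                             // db.password

        // The PostgreSQL JDBC driver (org.postgresql.Driver) must be on the classpath.
        try (Connection conn = DriverManager.getConnection(url, user, password);
             Statement st = conn.createStatement();
             ResultSet rs = st.executeQuery("SELECT current_schema()")) {
            rs.next();
            System.out.println("Connected, current schema: " + rs.getString(1));
        }
    }
}
```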
# URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -98,9 +90,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. +# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Database Connection pool parameters diff --git a/dspace/config/local.cfg.EXAMPLE b/dspace/config/local.cfg.EXAMPLE index cf13a47d76..7176ed275a 100644 --- a/dspace/config/local.cfg.EXAMPLE +++ b/dspace/config/local.cfg.EXAMPLE @@ -71,23 +71,15 @@ dspace.name = DSpace at My University ########################## # DATABASE CONFIGURATION # ########################## -# DSpace only supports two database types: PostgreSQL or Oracle -# PostgreSQL is highly recommended. -# Oracle support is DEPRECATED. See https://github.com/DSpace/DSpace/issues/8214 +# DSpace ONLY supports PostgreSQL at this time. # URL for connecting to database -# * Postgres template: jdbc:postgresql://localhost:5432/dspace -# * Oracle template (DEPRECATED): jdbc:oracle:thin:@//localhost:1521/xe db.url = jdbc:postgresql://localhost:5432/dspace -# JDBC Driver -# * For Postgres: org.postgresql.Driver -# * For Oracle (DEPRECATED): oracle.jdbc.OracleDriver +# JDBC Driver for PostgreSQL db.driver = org.postgresql.Driver -# Database Dialect (for Hibernate) -# * For Postgres: org.hibernate.dialect.PostgreSQL94Dialect -# * For Oracle (DEPRECATED): org.hibernate.dialect.Oracle10gDialect +# PostgreSQL Database Dialect (for Hibernate) db.dialect = org.hibernate.dialect.PostgreSQL94Dialect # Database username and password @@ -95,9 +87,7 @@ db.username = dspace db.password = dspace # Database Schema name -# * For Postgres, this is often "public" (default schema) -# * For Oracle (DEPRECATED), schema is equivalent to the username of your database account, -# so this may be set to ${db.username} in most scenarios. 
+# For PostgreSQL, this is often "public" (default schema) db.schema = public ## Connection pool parameters From 140cdc6de07a2263857372337257c56b8c4296fe Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Tue, 2 May 2023 12:09:30 -0500 Subject: [PATCH 038/125] Remove all oracle-specific migration scripts --- .../oracle/upgradeToFlyway4x.sql | 29 - ...1__CollectionCommunity_Metadata_Handle.sql | 90 --- .../rdbms/sqlmigration/oracle/README.md | 84 --- ...tial_DSpace_1.2_Oracle_database_schema.sql | 550 ------------------ .../V1.3__Upgrade_to_DSpace_1.3_schema.sql | 57 -- ...V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql | 133 ----- .../V1.4__Upgrade_to_DSpace_1.4_schema.sql | 371 ------------ .../V1.5__Upgrade_to_DSpace_1.5_schema.sql | 142 ----- .../V1.6__Upgrade_to_DSpace_1.6_schema.sql | 93 --- .../V1.7__Upgrade_to_DSpace_1.7_schema.sql | 20 - .../V1.8__Upgrade_to_DSpace_1.8_schema.sql | 23 - .../V3.0__Upgrade_to_DSpace_3.x_schema.sql | 52 -- .../V4.0__Upgrade_to_DSpace_4.x_schema.sql | 88 --- ....9_2015.10.26__DS-2818_registry_update.sql | 64 -- ...08.08__DS-1945_Helpdesk_Request_a_Copy.sql | 20 - ...9.26__DS-1582_Metadata_For_All_Objects.sql | 333 ----------- .../oracle/V5.6_2016.08.23__DS-3097.sql | 24 - ..._metadatavalue_resource_type_id_column.sql | 23 - ...015.03.07__DS-2701_Hibernate_migration.sql | 469 --------------- ..._03_06_01__DS_3378_lost_oracle_indexes.sql | 18 - .../oracle/V6.0_2016.01.03__DS-3024.sql | 25 - ...02.25__DS-3004-slow-searching-as-admin.sql | 30 - ...04.01__DS-1955_Increase_embargo_reason.sql | 25 - ...016.04.04__DS-3086-OAI-Performance-fix.sql | 46 -- ...125-fix-bundle-bitstream-delete-rights.sql | 33 -- ...DS-3168-fix-requestitem_item_id_column.sql | 24 - .../oracle/V6.0_2016.07.21__DS-2775.sql | 30 - ...6.07.26__DS-3277_fix_handle_assignment.sql | 44 -- .../oracle/V6.0_2016.08.23__DS-3097.sql | 24 - .../V6.0_2016.11.29__DS-3410-lost-indexes.sql | 17 - .../oracle/V6.0_2016.11.30__DS-3409.sql | 16 - ...2017.10.12__DS-3542-stateless-sessions.sql | 20 - .../V7.0_2018.04.16__dspace-entities.sql | 65 --- .../V7.0_2018.06.07__DS-3851-permission.sql | 24 - ....05.02__DS-4239-workflow-xml-migration.sql | 17 - ..._2019.07.31__Retrieval_of_name_variant.sql | 18 - ....13__relationship_type_copy_left_right.sql | 14 - .../V7.0_2019_06_14__scripts-and-process.sql | 40 -- ..._2020.01.08__DS-626-statistics-tracker.sql | 29 - ...V7.0_2021.01.22__Remove_basic_workflow.sql | 17 - .../oracle/V7.0_2021.02.08__tilted_rels.sql | 13 - ....18__Move_entity_type_to_dspace_schema.sql | 56 -- ..._type_from_item_template_to_collection.sql | 28 - ...ction_table_drop_workflow_stem_columns.sql | 15 - ...DV_place_after_migrating_from_DSpace_5.sql | 24 - ....3_2022.04.29__orcid_queue_and_history.sql | 54 -- .../V7.3_2022.05.16__Orcid_token_table.sql | 24 - .../V7.3_2022.06.16__process_to_group.sql | 18 - ...on_status_column_to_relationship_table.sql | 10 - ...mter_change_columns_subscription_table.sql | 45 -- ...5_2022.12.09__Supervision_Orders_table.sql | 78 --- .../V7.5_2022.12.15__system_wide_alerts.sql | 22 - ...pdate_PNG_in_bitstream_format_registry.sql | 17 - ..._and_history_descriptions_to_text_type.sql | 10 - .../sqlmigration/oracle/update-sequences.sql | 77 --- .../V5.7_2017.05.05__DS-3431.sql | 503 ---------------- ...8.11__DS-2701_Basic_Workflow_Migration.sql | 37 -- .../V6.1_2017.01.03__DS-3431.sql | 503 ---------------- ....08.11__DS-2701_Xml_Workflow_Migration.sql | 141 ----- ....0_2018.04.03__upgrade_workflow_policy.sql | 27 - .../oracle/data_workflow_migration.sql | 377 ------------ 
.../v6.0__DS-2701_data_workflow_migration.sql | 377 ------------ .../v6.0__DS-2701_xml_workflow_migration.sql | 124 ---- .../oracle/xml_workflow_migration.sql | 124 ---- 64 files changed, 5945 deletions(-) delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql delete mode 100644 
dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql delete mode 100644 dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql deleted file mode 100644 index 7907fccc00..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/oracle/upgradeToFlyway4x.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- Copyright 2010-2017 Boxfuse GmbH --- --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at --- --- http://www.apache.org/licenses/LICENSE-2.0 --- --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
--- ------------------ --- This is the Oracle upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql --- --- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------- - -DROP INDEX "${schema}"."${table}_vr_idx"; -DROP INDEX "${schema}"."${table}_ir_idx"; -ALTER TABLE "${schema}"."${table}" DROP COLUMN "version_rank"; -ALTER TABLE "${schema}"."${table}" DROP PRIMARY KEY DROP INDEX; -ALTER TABLE "${schema}"."${table}" MODIFY "version" NULL; -ALTER TABLE "${schema}"."${table}" ADD CONSTRAINT "${table}_pk" PRIMARY KEY ("installed_rank"); -UPDATE "${schema}"."${table}" SET "type"='BASELINE' WHERE "type"='INIT'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql deleted file mode 100644 index fff1fe154f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/metadata/oracle/V7.0_2020.10.31__CollectionCommunity_Metadata_Handle.sql +++ /dev/null @@ -1,90 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create COMMUNITY handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from community c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - -------------------------------------------------------------- --- This will create COLLECTION handle metadata -------------------------------------------------------------- - -insert into metadatavalue (metadata_field_id, text_value, text_lang, place, authority, confidence, dspace_object_id) - select distinct - T1.metadata_field_id as metadata_field_id, - concat('${handle.canonical.prefix}', h.handle) as text_value, - null as text_lang, 0 as place, - null as authority, - -1 as confidence, - c.uuid as dspace_object_id - - from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - - cross join (select mfr.metadata_field_id as metadata_field_id from metadatafieldregistry mfr - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri') T1 - - where uuid not in ( - select c.uuid as uuid from collection c - left outer join handle h on h.resource_id = c.uuid - left outer join metadatavalue mv on mv.dspace_object_id = c.uuid - left outer join metadatafieldregistry mfr on mv.metadata_field_id = mfr.metadata_field_id - left outer join metadataschemaregistry msr on mfr.metadata_schema_id = msr.metadata_schema_id - where msr.short_id = 'dc' - and mfr.element = 'identifier' - and mfr.qualifier = 'uri' - ) -; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md deleted file mode 100644 index 6cef123859..0000000000 
--- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/README.md +++ /dev/null @@ -1,84 +0,0 @@ -# Oracle Flyway Database Migrations (i.e. Upgrades) - ---- -WARNING: Oracle Support is deprecated. -See https://github.com/DSpace/DSpace/issues/8214 ---- - -The SQL scripts in this directory are Oracle-specific database migrations. They are -used to automatically upgrade your DSpace database using [Flyway](http://flywaydb.org/). -As such, these scripts are automatically called by Flyway when the DSpace -`DatabaseManager` initializes itself (see `initializeDatabase()` method). During -that process, Flyway determines which version of DSpace your database is using -and then executes the appropriate upgrade script(s) to bring it up to the latest -version. - -If any failures occur, Flyway will "rollback" the upgrade script which resulted -in an error and log the issue in the DSpace log file at `[dspace]/log/dspace.log.[date]` - -**WARNING:** IT IS NOT RECOMMENDED TO RUN THESE SCRIPTS MANUALLY. If you do so, -Flyway will may throw failures the next time you startup DSpace, as Flyway will -not realize you manually ran one or more scripts. - -Please see the Flyway Documentation for more information: http://flywaydb.org/ - -## Oracle Porting Notes for the Curious - -Oracle is missing quite a number of cool features found in Postgres, so -workarounds had to be found, most of which are hidden behind tests in -DatabaseManager. If Oracle is your DBMS, the workarounds are activated: - -Oracle doesn't like ';' characters in JDBC SQL - they have all been removed -from the DSpace source, including code in the .sql file reader to strip ;'s. - -browse code - LIMIT and OFFSET is used to limit browse results, and an -Oracle-hack is used to limit the result set to a given size - -Oracle has no boolean data type, so a new schema file was created that -uses NUMBER(1) (AKA 'integers') and code is inserted everywhere to use 0 for -false and 1 for true if DSpace is using Oracle. - -Oracle doesn't have a TEXT data type either, so TEXT columns are defined -as VARCHAR2 in the Oracle-specific schema. - -Oracle doesn't allow dynamic naming for objects, so our cute trick to -derive the name of the sequence by appending _seq to the table name -in a function doesn't work in Oracle - workaround is to insert Oracle -code to generate the name of the sequence and then place that into -our SQL calls to generate a new ID. - -Oracle doesn't let you directly set the value of sequences, so -update-sequences.sql is forced to use a special script sequpdate.sql -to update the sequences. - -Bitstream had a column 'size' which is a reserved word in Oracle, -so this had to be changed to 'size_bytes' with corresponding code changes. - -VARCHAR2 has a limit of 4000 characters, so DSpace text data is limited to 4k. -Going to the CLOB data type can get around that, but seemed like too much effort -for now. Note that with UTF-8 encoding that 4k could translate to 1300 -characters worst-case (every character taking up 3 bytes is the worst case -scenario.) - -### UPDATE 5 April 2007 - -CLOBs are now used as follows: -MetadataValue:text_value -Community:introductory_text -Community:copyright_text -Collection:introductory_text -Collection:license -Collection:copyright_text - -DatabaseManager had to have some of the type checking changed, because Oracle's -JDBC driver is reporting INTEGERS as type DECIMAL. 
- -Oracle doesn't like it when you reference table names in lower case when -getting JDBC metadata for the tables, so they are converted in TableRow -to upper case. - -### UPDATE 27 November 2012 - -Oracle complains with ORA-01408 if you attempt to create an index on a column which -has already had the UNIQUE contraint added (such an index is implicit in maintaining the uniqueness -of the column). See [DS-1370](https://jira.duraspace.org/browse/DS-1370) for details. diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql deleted file mode 100644 index 157274e05d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.2__Initial_DSpace_1.2_Oracle_database_schema.sql +++ /dev/null @@ -1,550 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE bitstreamformatregistry_seq; -CREATE SEQUENCE fileextension_seq; -CREATE SEQUENCE bitstream_seq; -CREATE SEQUENCE eperson_seq; --- start group sequence at 0, since Anonymous group = 0 -CREATE SEQUENCE epersongroup_seq MINVALUE 0 START WITH 0; -CREATE SEQUENCE item_seq; -CREATE SEQUENCE bundle_seq; -CREATE SEQUENCE item2bundle_seq; -CREATE SEQUENCE bundle2bitstream_seq; -CREATE SEQUENCE dctyperegistry_seq; -CREATE SEQUENCE dcvalue_seq; -CREATE SEQUENCE community_seq; -CREATE SEQUENCE collection_seq; -CREATE SEQUENCE community2community_seq; -CREATE SEQUENCE community2collection_seq; -CREATE SEQUENCE collection2item_seq; -CREATE SEQUENCE resourcepolicy_seq; -CREATE SEQUENCE epersongroup2eperson_seq; -CREATE SEQUENCE handle_seq; -CREATE SEQUENCE workspaceitem_seq; -CREATE SEQUENCE workflowitem_seq; -CREATE SEQUENCE tasklistitem_seq; -CREATE SEQUENCE registrationdata_seq; -CREATE SEQUENCE subscription_seq; -CREATE SEQUENCE history_seq; -CREATE SEQUENCE historystate_seq; -CREATE SEQUENCE communities2item_seq; -CREATE SEQUENCE itemsbyauthor_seq; -CREATE SEQUENCE itemsbytitle_seq; -CREATE SEQUENCE itemsbydate_seq; -CREATE SEQUENCE itemsbydateaccessioned_seq; - - -------------------------------------------------------- --- BitstreamFormatRegistry table -------------------------------------------------------- -CREATE TABLE BitstreamFormatRegistry -( - bitstream_format_id INTEGER PRIMARY KEY, - mimetype VARCHAR2(48), - short_description VARCHAR2(128) UNIQUE, - description VARCHAR2(2000), - support_level INTEGER, - -- Identifies internal types - internal NUMBER(1) -); - -------------------------------------------------------- --- FileExtension table -------------------------------------------------------- -CREATE TABLE FileExtension -( - file_extension_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - extension VARCHAR2(16) -); - -------------------------------------------------------- --- 
Bitstream table -------------------------------------------------------- -CREATE TABLE Bitstream -( - bitstream_id INTEGER PRIMARY KEY, - bitstream_format_id INTEGER REFERENCES BitstreamFormatRegistry(bitstream_format_id), - name VARCHAR2(256), - size_bytes INTEGER, - checksum VARCHAR2(64), - checksum_algorithm VARCHAR2(32), - description VARCHAR2(2000), - user_format_description VARCHAR2(2000), - source VARCHAR2(256), - internal_id VARCHAR2(256), - deleted NUMBER(1), - store_number INTEGER, - sequence_id INTEGER -); - -------------------------------------------------------- --- EPerson table -------------------------------------------------------- -CREATE TABLE EPerson -( - eperson_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - password VARCHAR2(64), - firstname VARCHAR2(64), - lastname VARCHAR2(64), - can_log_in NUMBER(1), - require_certificate NUMBER(1), - self_registered NUMBER(1), - last_active TIMESTAMP, - sub_frequency INTEGER, - phone VARCHAR2(32) -); - -------------------------------------------------------- --- EPersonGroup table -------------------------------------------------------- -CREATE TABLE EPersonGroup -( - eperson_group_id INTEGER PRIMARY KEY, - name VARCHAR2(256) UNIQUE -); - -------------------------------------------------------- --- Item table -------------------------------------------------------- -CREATE TABLE Item -( - item_id INTEGER PRIMARY KEY, - submitter_id INTEGER REFERENCES EPerson(eperson_id), - in_archive NUMBER(1), - withdrawn NUMBER(1), - last_modified TIMESTAMP, - owning_collection INTEGER -); - -------------------------------------------------------- --- Bundle table -------------------------------------------------------- -CREATE TABLE Bundle -( - bundle_id INTEGER PRIMARY KEY, - mets_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - name VARCHAR2(16), -- ORIGINAL | THUMBNAIL | TEXT - primary_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - -------------------------------------------------------- --- Item2Bundle table -------------------------------------------------------- -CREATE TABLE Item2Bundle -( - id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - bundle_id INTEGER REFERENCES Bundle(bundle_id) -); - --- index by item_id -CREATE INDEX item2bundle_item_idx on Item2Bundle(item_id); - -------------------------------------------------------- --- Bundle2Bitstream table -------------------------------------------------------- -CREATE TABLE Bundle2Bitstream -( - id INTEGER PRIMARY KEY, - bundle_id INTEGER REFERENCES Bundle(bundle_id), - bitstream_id INTEGER REFERENCES Bitstream(bitstream_id) -); - --- index by bundle_id -CREATE INDEX bundle2bitstream_bundle_idx ON Bundle2Bitstream(bundle_id); - -------------------------------------------------------- --- DCTypeRegistry table -------------------------------------------------------- -CREATE TABLE DCTypeRegistry -( - dc_type_id INTEGER PRIMARY KEY, - element VARCHAR2(64), - qualifier VARCHAR2(64), - scope_note VARCHAR2(2000), - UNIQUE(element, qualifier) -); - -------------------------------------------------------- --- DCValue table -------------------------------------------------------- -CREATE TABLE DCValue -( - dc_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - dc_type_id INTEGER REFERENCES DCTypeRegistry(dc_type_id), - text_value VARCHAR2(2000), - text_lang VARCHAR2(24), - place INTEGER, - source_id INTEGER -); - --- An index for item_id - almost all access is based on --- instantiating the item object, which grabs all 
dcvalues --- related to that item -CREATE INDEX dcvalue_item_idx on DCValue(item_id); - -------------------------------------------------------- --- Community table -------------------------------------------------------- -CREATE TABLE Community -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128) UNIQUE, - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000) -); - -------------------------------------------------------- --- Collection table -------------------------------------------------------- -CREATE TABLE Collection -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text VARCHAR2(2000), - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license VARCHAR2(2000), - copyright_text VARCHAR2(2000), - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -------------------------------------------------------- --- Community2Community table -------------------------------------------------------- -CREATE TABLE Community2Community -( - id INTEGER PRIMARY KEY, - parent_comm_id INTEGER REFERENCES Community(community_id), - child_comm_id INTEGER REFERENCES Community(community_id) -); - -------------------------------------------------------- --- Community2Collection table -------------------------------------------------------- -CREATE TABLE Community2Collection -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - -------------------------------------------------------- --- Collection2Item table -------------------------------------------------------- -CREATE TABLE Collection2Item -( - id INTEGER PRIMARY KEY, - collection_id INTEGER REFERENCES Collection(collection_id), - item_id INTEGER REFERENCES Item(item_id) -); - --- index by collection_id -CREATE INDEX collection2item_collection_idx ON Collection2Item(collection_id); - -------------------------------------------------------- --- ResourcePolicy table -------------------------------------------------------- -CREATE TABLE ResourcePolicy -( - policy_id INTEGER PRIMARY KEY, - resource_type_id INTEGER, - resource_id INTEGER, - action_id INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - epersongroup_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - start_date DATE, - end_date DATE -); - --- index by resource_type,resource_id - all queries by --- authorization manager are select type=x, id=y, action=z -CREATE INDEX resourcepolicy_type_id_idx ON ResourcePolicy(resource_type_id,resource_id); - -------------------------------------------------------- --- EPersonGroup2EPerson table -------------------------------------------------------- -CREATE TABLE EPersonGroup2EPerson -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - eperson_id INTEGER REFERENCES EPerson(eperson_id) -); - --- Index by group ID (used heavily by AuthorizeManager) -CREATE INDEX epersongroup2eperson_group_idx on EPersonGroup2EPerson(eperson_group_id); - - 
-------------------------------------------------------- --- Handle table -------------------------------------------------------- -CREATE TABLE Handle -( - handle_id INTEGER PRIMARY KEY, - handle VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER -); - -------------------------------------------------------- --- WorkspaceItem table -------------------------------------------------------- -CREATE TABLE WorkspaceItem -( - workspace_item_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), -- boolean - published_before NUMBER(1), - multiple_files NUMBER(1), - -- How for the user has got in the submit process - stage_reached INTEGER -); - -------------------------------------------------------- --- WorkflowItem table -------------------------------------------------------- -CREATE TABLE WorkflowItem -( - workflow_id INTEGER PRIMARY KEY, - item_id INTEGER UNIQUE REFERENCES Item(item_id), - collection_id INTEGER REFERENCES Collection(collection_id), - state INTEGER, - owner INTEGER REFERENCES EPerson(eperson_id), - - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI - -); - -------------------------------------------------------- --- TasklistItem table -------------------------------------------------------- -CREATE TABLE TasklistItem -( - tasklist_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - workflow_id INTEGER REFERENCES WorkflowItem(workflow_id) -); - - -------------------------------------------------------- --- RegistrationData table -------------------------------------------------------- -CREATE TABLE RegistrationData -( - registrationdata_id INTEGER PRIMARY KEY, - email VARCHAR2(64) UNIQUE, - token VARCHAR2(48), - expires TIMESTAMP -); - - -------------------------------------------------------- --- Subscription table -------------------------------------------------------- -CREATE TABLE Subscription -( - subscription_id INTEGER PRIMARY KEY, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - collection_id INTEGER REFERENCES Collection(collection_id) -); - - -------------------------------------------------------- --- History table -------------------------------------------------------- -CREATE TABLE History -( - history_id INTEGER PRIMARY KEY, - -- When it was stored - creation_date TIMESTAMP, - -- A checksum to keep INTEGERizations from being stored more than once - checksum VARCHAR2(32) UNIQUE -); - -------------------------------------------------------- --- HistoryState table -------------------------------------------------------- -CREATE TABLE HistoryState -( - history_state_id INTEGER PRIMARY KEY, - object_id VARCHAR2(64) -); - ------------------------------------------------------------- --- Browse subsystem tables and views ------------------------------------------------------------- - -------------------------------------------------------- --- Communities2Item table -------------------------------------------------------- -CREATE TABLE Communities2Item -( - id INTEGER PRIMARY KEY, - community_id INTEGER REFERENCES Community(community_id), - item_id INTEGER REFERENCES Item(item_id) -); - -------------------------------------------------------- --- 
Community2Item view ------------------------------------------------------- -CREATE VIEW Community2Item as -SELECT Community2Collection.community_id, Collection2Item.item_id -FROM Community2Collection, Collection2Item -WHERE Collection2Item.collection_id = Community2Collection.collection_id -; - -------------------------------------------------------- --- ItemsByAuthor table -------------------------------------------------------- -CREATE TABLE ItemsByAuthor -( - items_by_author_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - author VARCHAR2(2000), - sort_author VARCHAR2(2000) -); - --- index by sort_author, of course! -CREATE INDEX sort_author_idx on ItemsByAuthor(sort_author); - -------------------------------------------------------- --- CollectionItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CollectionItemsByAuthor as -SELECT Collection2Item.collection_id, ItemsByAuthor.* -FROM ItemsByAuthor, Collection2Item -WHERE ItemsByAuthor.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByAuthor view -------------------------------------------------------- -CREATE VIEW CommunityItemsByAuthor as -SELECT Communities2Item.community_id, ItemsByAuthor.* -FROM ItemsByAuthor, Communities2Item -WHERE ItemsByAuthor.item_id = Communities2Item.item_id -; - ----------------------------------------- --- ItemsByTitle table ----------------------------------------- -CREATE TABLE ItemsByTitle -( - items_by_title_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - title VARCHAR2(2000), - sort_title VARCHAR2(2000) -); - --- index by the sort_title -CREATE INDEX sort_title_idx on ItemsByTitle(sort_title); - - -------------------------------------------------------- --- CollectionItemsByTitle view -------------------------------------------------------- -CREATE VIEW CollectionItemsByTitle as -SELECT Collection2Item.collection_id, ItemsByTitle.* -FROM ItemsByTitle, Collection2Item -WHERE ItemsByTitle.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByTitle view -------------------------------------------------------- -CREATE VIEW CommunityItemsByTitle as -SELECT Communities2Item.community_id, ItemsByTitle.* -FROM ItemsByTitle, Communities2Item -WHERE ItemsByTitle.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDate table -------------------------------------------------------- -CREATE TABLE ItemsByDate -( - items_by_date_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_issued VARCHAR2(2000) -); - --- sort by date -CREATE INDEX date_issued_idx on ItemsByDate(date_issued); - -------------------------------------------------------- --- CollectionItemsByDate view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDate as -SELECT Collection2Item.collection_id, ItemsByDate.* -FROM ItemsByDate, Collection2Item -WHERE ItemsByDate.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDate view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDate as -SELECT Communities2Item.community_id, ItemsByDate.* -FROM ItemsByDate, Communities2Item -WHERE ItemsByDate.item_id = Communities2Item.item_id -; - -------------------------------------------------------- --- ItemsByDateAccessioned table 
-------------------------------------------------------- -CREATE TABLE ItemsByDateAccessioned -( - items_by_date_accessioned_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - date_accessioned VARCHAR2(2000) -); - -------------------------------------------------------- --- CollectionItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CollectionItemsByDateAccession as -SELECT Collection2Item.collection_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Collection2Item -WHERE ItemsByDateAccessioned.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsByDateAccession view -------------------------------------------------------- -CREATE VIEW CommunityItemsByDateAccession as -SELECT Communities2Item.community_id, ItemsByDateAccessioned.* -FROM ItemsByDateAccessioned, Communities2Item -WHERE ItemsByDateAccessioned.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql deleted file mode 100644 index 37d7e115eb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.3__Upgrade_to_DSpace_1.3_schema.sql +++ /dev/null @@ -1,57 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -CREATE SEQUENCE epersongroup2workspaceitem_seq; - -------------------------------------------------------------------------------- --- create the new EPersonGroup2WorkspaceItem table -------------------------------------------------------------------------------- - -CREATE TABLE EPersonGroup2WorkspaceItem -( - id INTEGER PRIMARY KEY, - eperson_group_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - workspace_item_id INTEGER REFERENCES WorkspaceItem(workspace_item_id) -); - -------------------------------------------------------------------------------- --- modification to collection table to support being able to change the --- submitter and collection admin group names -------------------------------------------------------------------------------- -ALTER TABLE collection ADD submitter INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE collection ADD admin INTEGER REFERENCES EPersonGroup(eperson_group_id); - -ALTER TABLE eperson ADD netid VARCHAR2(64) UNIQUE; - -------------------------------------------------------------------------------- --- Additional indices for performance -------------------------------------------------------------------------------- - --- index by resource id and resource type id -CREATE INDEX handle_resource_id_type_idx ON handle(resource_id, resource_type_id); - --- Indexing browse tables update/re-index performance -CREATE INDEX Communities2Item_item_id_idx ON Communities2Item( item_id ); -CREATE INDEX ItemsByAuthor_item_id_idx ON ItemsByAuthor(item_id); -CREATE INDEX ItemsByTitle_item_id_idx ON ItemsByTitle(item_id); -CREATE INDEX ItemsByDate_item_id_idx ON ItemsByDate(item_id); -CREATE INDEX ItemsByDateAcc_item_id_idx ON ItemsByDateAccessioned(item_id); - --- Improve mapping tables -CREATE INDEX Com2Coll_community_id_idx ON Community2Collection(community_id); -CREATE INDEX Com2Coll_collection_id_idx ON Community2Collection(collection_id); -CREATE INDEX Coll2Item_item_id_idx ON Collection2Item( item_id ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql deleted file mode 100644 index a713ced8bb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4.2__Upgrade_to_DSpace_1.4.2_schema.sql +++ /dev/null @@ -1,133 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ---------------------------------------- --- Update MetadataValue to include CLOB ---------------------------------------- - -CREATE TABLE MetadataValueTemp -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value CLOB, - text_lang VARCHAR(64), - place INTEGER -); - -INSERT INTO MetadataValueTemp -SELECT * FROM MetadataValue; - -DROP VIEW dcvalue; -DROP TABLE MetadataValue; -ALTER TABLE MetadataValueTemp RENAME TO MetadataValue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); - ------------------------------------- --- Update Community to include CLOBs ------------------------------------- - -CREATE TABLE CommunityTemp -( - community_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - copyright_text CLOB, - side_bar_text VARCHAR2(2000) -); - -INSERT INTO CommunityTemp -SELECT * FROM Community; - -DROP TABLE Community CASCADE CONSTRAINTS; -ALTER TABLE CommunityTemp RENAME TO Community; - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_parent -FOREIGN KEY (parent_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Community ADD CONSTRAINT fk_c2c_child -FOREIGN KEY (child_comm_id) -REFERENCES Community (community_id); - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -ALTER TABLE Communities2Item ADD CONSTRAINT fk_c2i_community -FOREIGN KEY (community_id) -REFERENCES Community (community_id); - -------------------------------------- --- Update Collection to include CLOBs -------------------------------------- - -CREATE TABLE CollectionTemp -( - collection_id INTEGER PRIMARY KEY, - name VARCHAR2(128), - short_description VARCHAR2(512), - introductory_text CLOB, - logo_bitstream_id INTEGER REFERENCES Bitstream(bitstream_id), - template_item_id INTEGER REFERENCES Item(item_id), - provenance_description VARCHAR2(2000), - license CLOB, - copyright_text CLOB, - side_bar_text VARCHAR2(2000), - workflow_step_1 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_2 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - workflow_step_3 INTEGER REFERENCES EPersonGroup( eperson_group_id ), - submitter INTEGER REFERENCES EPersonGroup( eperson_group_id ), - admin INTEGER REFERENCES EPersonGroup( eperson_group_id ) -); - -INSERT INTO CollectionTemp -SELECT * FROM Collection; - -DROP TABLE Collection CASCADE CONSTRAINTS; -ALTER TABLE CollectionTemp RENAME TO Collection; - -ALTER TABLE Community2Collection ADD CONSTRAINT fk_c2c_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Collection2Item ADD CONSTRAINT fk_c2i_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER 
TABLE WorkspaceItem ADD CONSTRAINT fk_wsi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE WorkflowItem ADD CONSTRAINT fk_wfi_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); - -ALTER TABLE Subscription ADD CONSTRAINT fk_subs_collection -FOREIGN KEY (collection_id) -REFERENCES Collection (collection_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql deleted file mode 100644 index 54cf10067b..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.4__Upgrade_to_DSpace_1.4_schema.sql +++ /dev/null @@ -1,371 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------- --- Sequences for Group within Group feature -------------------------------------------------------------------------------- -CREATE SEQUENCE group2group_seq; -CREATE SEQUENCE group2groupcache_seq; - ------------------------------------------------------- --- Group2Group table, records group membership in other groups ------------------------------------------------------- -CREATE TABLE Group2Group -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - ------------------------------------------------------- --- Group2GroupCache table, is the 'unwound' hierarchy in --- Group2Group. It explicitly names every parent child --- relationship, even with nested groups. 
For example, --- If Group2Group lists B is a child of A and C is a child of B, --- this table will have entries for parent(A,B), and parent(B,C) --- AND parent(A,C) so that all of the child groups of A can be --- looked up in a single simple query ------------------------------------------------------- -CREATE TABLE Group2GroupCache -( - id INTEGER PRIMARY KEY, - parent_id INTEGER REFERENCES EPersonGroup(eperson_group_id), - child_id INTEGER REFERENCES EPersonGroup(eperson_group_id) -); - - -------------------------------------------------------- --- New Metadata Tables and Sequences -------------------------------------------------------- -CREATE SEQUENCE metadataschemaregistry_seq; -CREATE SEQUENCE metadatafieldregistry_seq; -CREATE SEQUENCE metadatavalue_seq; - --- MetadataSchemaRegistry table -CREATE TABLE MetadataSchemaRegistry -( - metadata_schema_id INTEGER PRIMARY KEY, - namespace VARCHAR(256) UNIQUE, - short_id VARCHAR(32) -); - --- MetadataFieldRegistry table -CREATE TABLE MetadataFieldRegistry -( - metadata_field_id INTEGER PRIMARY KEY, - metadata_schema_id INTEGER NOT NULL REFERENCES MetadataSchemaRegistry(metadata_schema_id), - element VARCHAR(64), - qualifier VARCHAR(64), - scope_note VARCHAR2(2000) -); - --- MetadataValue table -CREATE TABLE MetadataValue -( - metadata_value_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - metadata_field_id INTEGER REFERENCES MetadataFieldRegistry(metadata_field_id), - text_value VARCHAR2(2000), - text_lang VARCHAR(24), - place INTEGER -); - --- Create the DC schema -INSERT INTO MetadataSchemaRegistry VALUES (1,'http://dublincore.org/documents/dcmi-terms/','dc'); - --- Migrate the existing DCTypes into the new metadata field registry -INSERT INTO MetadataFieldRegistry - (metadata_schema_id, metadata_field_id, element, qualifier, scope_note) - SELECT '1' AS metadata_schema_id, dc_type_id, element, - qualifier, scope_note FROM dctyperegistry; - --- Copy the DCValues into the new MetadataValue table -INSERT INTO MetadataValue (item_id, metadata_field_id, text_value, text_lang, place) - SELECT item_id, dc_type_id, text_value, text_lang, place FROM dcvalue; - -DROP TABLE dcvalue; -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.item_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1; - - --- After copying data from dctypregistry to metadataschemaregistry, we need to reset our sequences --- Update metadatafieldregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_field_id) INTO curr FROM metadatafieldregistry; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatafieldregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatafieldregistry_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadatavalue_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_value_id) INTO curr FROM metadatavalue; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE metadatavalue_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadatavalue_seq START WITH ' || NVL(curr,1); -END; -/ --- Update metadataschemaregistry_seq to new max value -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(metadata_schema_id) INTO curr FROM metadataschemaregistry; - - curr := curr + 1; - - 
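  -- (Aside, not in the original script) Each of these three PL/SQL blocks
  -- follows the same pattern: read MAX(id) from the freshly populated table,
  -- add one, then drop and recreate the sequence so that it starts past the
  -- existing ids; NVL(curr,1) in the CREATE below covers an empty table,
  -- where MAX() returns NULL.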
EXECUTE IMMEDIATE 'DROP SEQUENCE metadataschemaregistry_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE metadataschemaregistry_seq START WITH ' || NVL(curr,1); -END; -/ - --- Drop the old dctyperegistry -DROP TABLE dctyperegistry; - --- create indexes for the metadata tables -CREATE INDEX metadatavalue_item_idx ON MetadataValue(item_id); -CREATE INDEX metadatavalue_item_idx2 ON MetadataValue(item_id,metadata_field_id); -CREATE INDEX metadatafield_schema_idx ON MetadataFieldRegistry(metadata_schema_id); - - -------------------------------------------------------- --- Create the checksum checker tables -------------------------------------------------------- --- list of the possible results as determined --- by the system or an administrator - -CREATE TABLE checksum_results -( - result_code VARCHAR(64) PRIMARY KEY, - result_description VARCHAR2(2000) -); - - --- This table has a one-to-one relationship --- with the bitstream table. A row will be inserted --- every time a row is inserted into the bitstream table, and --- that row will be updated every time the checksum is --- re-calculated. - -CREATE TABLE most_recent_checksum -( - bitstream_id INTEGER PRIMARY KEY, - to_be_processed NUMBER(1) NOT NULL, - expected_checksum VARCHAR(64) NOT NULL, - current_checksum VARCHAR(64) NOT NULL, - last_process_start_date TIMESTAMP NOT NULL, - last_process_end_date TIMESTAMP NOT NULL, - checksum_algorithm VARCHAR(64) NOT NULL, - matched_prev_checksum NUMBER(1) NOT NULL, - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - - --- A row will be inserted into this table every --- time a checksum is re-calculated. - -CREATE SEQUENCE checksum_history_seq; - -CREATE TABLE checksum_history -( - check_id INTEGER PRIMARY KEY, - bitstream_id INTEGER, - process_start_date TIMESTAMP, - process_end_date TIMESTAMP, - checksum_expected VARCHAR(64), - checksum_calculated VARCHAR(64), - result VARCHAR(64) REFERENCES checksum_results(result_code) -); - --- this will insert into the result code --- the initial results - -insert into checksum_results -values -( - 'INVALID_HISTORY', - 'Install of the cheksum checking code do not consider this history as valid' -); - -insert into checksum_results -values -( - 'BITSTREAM_NOT_FOUND', - 'The bitstream could not be found' -); - -insert into checksum_results -values -( - 'CHECKSUM_MATCH', - 'Current checksum matched previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_NO_MATCH', - 'Current checksum does not match previous checksum' -); - -insert into checksum_results -values -( - 'CHECKSUM_PREV_NOT_FOUND', - 'Previous checksum was not found: no comparison possible' -); - -insert into checksum_results -values -( - 'BITSTREAM_INFO_NOT_FOUND', - 'Bitstream info not found' -); - -insert into checksum_results -values -( - 'CHECKSUM_ALGORITHM_INVALID', - 'Invalid checksum algorithm' -); -insert into checksum_results -values -( - 'BITSTREAM_NOT_PROCESSED', - 'Bitstream marked to_be_processed=false' -); -insert into checksum_results -values -( - 'BITSTREAM_MARKED_DELETED', - 'Bitstream marked deleted in bitstream table' -); - --- this will insert into the most recent checksum --- on install all existing bitstreams --- setting all bitstreams already set as --- deleted to not be processed - -insert into most_recent_checksum -( - bitstream_id, - to_be_processed, - expected_checksum, - current_checksum, - last_process_start_date, - last_process_end_date, - checksum_algorithm, - matched_prev_checksum -) -select - bitstream.bitstream_id, - '1', - CASE WHEN 
bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - CASE WHEN bitstream.checksum IS NULL THEN '' ELSE bitstream.checksum END, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - CASE WHEN bitstream.checksum_algorithm IS NULL THEN 'MD5' ELSE bitstream.checksum_algorithm END, - '1' -from bitstream; - --- Update all the deleted checksums --- to not be checked --- because they have since been --- deleted from the system - -update most_recent_checksum -set to_be_processed = 0 -where most_recent_checksum.bitstream_id in ( -select bitstream_id -from bitstream where deleted = '1' ); - --- this will insert into history table --- for the initial start --- we want to tell the users to disregard the initial --- inserts into the checksum history table - -insert into checksum_history -( - bitstream_id, - process_start_date, - process_end_date, - checksum_expected, - checksum_calculated -) -select most_recent_checksum.bitstream_id, - most_recent_checksum.last_process_end_date, - TO_TIMESTAMP(TO_CHAR(current_timestamp, 'DD-MM-RRRR HH24:MI:SS'), 'DD-MM-RRRR HH24:MI:SS'), - most_recent_checksum.expected_checksum, - most_recent_checksum.expected_checksum -FROM most_recent_checksum; - --- update the history to indicate that this was --- the first time the software was installed -update checksum_history -set result = 'INVALID_HISTORY'; - - -------------------------------------------------------- --- Table and views for 'browse by subject' functionality -------------------------------------------------------- -CREATE SEQUENCE itemsbysubject_seq; - -------------------------------------------------------- --- ItemsBySubject table -------------------------------------------------------- -CREATE TABLE ItemsBySubject -( - items_by_subject_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - subject VARCHAR2(2000), - sort_subject VARCHAR2(2000) -); - --- index by sort_subject -CREATE INDEX sort_subject_idx on ItemsBySubject(sort_subject); - -------------------------------------------------------- --- CollectionItemsBySubject view -------------------------------------------------------- -CREATE VIEW CollectionItemsBySubject as -SELECT Collection2Item.collection_id, ItemsBySubject.* -FROM ItemsBySubject, Collection2Item -WHERE ItemsBySubject.item_id = Collection2Item.item_id -; - -------------------------------------------------------- --- CommunityItemsBySubject view -------------------------------------------------------- -CREATE VIEW CommunityItemsBySubject as -SELECT Communities2Item.community_id, ItemsBySubject.* -FROM ItemsBySubject, Communities2Item -WHERE ItemsBySubject.item_id = Communities2Item.item_id -; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql deleted file mode 100644 index bb217bd0d1..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.5__Upgrade_to_DSpace_1.5_schema.sql +++ /dev/null @@ -1,142 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING 
WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - --- Remove NOT NULL restrictions from the checksum columns of most_recent_checksum -ALTER TABLE most_recent_checksum MODIFY expected_checksum null; -ALTER TABLE most_recent_checksum MODIFY current_checksum null; - ------------------------------------------------------- --- New Column language language in EPerson ------------------------------------------------------- - -alter table eperson ADD language VARCHAR2(64); -update eperson set language = 'en'; - --- totally unused column -alter table bundle drop column mets_bitstream_id; - -------------------------------------------------------------------------------- --- Necessary for Configurable Submission functionality: --- Modification to workspaceitem table to support keeping track --- of the last page reached within a step in the Configurable Submission Process -------------------------------------------------------------------------------- -ALTER TABLE workspaceitem ADD page_reached INTEGER; - - -------------------------------------------------------------------------- --- Increase the mimetype field size to support larger types, such as the --- new Word 2007 mimetypes. -------------------------------------------------------------------------- -ALTER TABLE BitstreamFormatRegistry MODIFY (mimetype VARCHAR(256)); - - -------------------------------------------------------------------------- --- Tables to manage cache of item counts for communities and collections -------------------------------------------------------------------------- - -CREATE TABLE collection_item_count ( - collection_id INTEGER PRIMARY KEY REFERENCES collection(collection_id), - count INTEGER -); - -CREATE TABLE community_item_count ( - community_id INTEGER PRIMARY KEY REFERENCES community(community_id), - count INTEGER -); - ------------------------------------------------------------------- --- Remove sequences and tables of the old browse system ------------------------------------------------------------------- - -DROP SEQUENCE itemsbyauthor_seq; -DROP SEQUENCE itemsbytitle_seq; -DROP SEQUENCE itemsbydate_seq; -DROP SEQUENCE itemsbydateaccessioned_seq; -DROP SEQUENCE itemsbysubject_seq; - -DROP TABLE ItemsByAuthor CASCADE CONSTRAINTS; -DROP TABLE ItemsByTitle CASCADE CONSTRAINTS; -DROP TABLE ItemsByDate CASCADE CONSTRAINTS; -DROP TABLE ItemsByDateAccessioned CASCADE CONSTRAINTS; -DROP TABLE ItemsBySubject CASCADE CONSTRAINTS; - -DROP TABLE History CASCADE CONSTRAINTS; -DROP TABLE HistoryState CASCADE CONSTRAINTS; - ----------------------------------------------------------------- --- Add indexes for foreign key columns ----------------------------------------------------------------- - -CREATE INDEX fe_bitstream_fk_idx ON FileExtension(bitstream_format_id); - -CREATE INDEX bit_bitstream_fk_idx ON Bitstream(bitstream_format_id); - -CREATE INDEX g2g_parent_fk_idx ON Group2Group(parent_id); -CREATE INDEX g2g_child_fk_idx ON Group2Group(child_id); - --- CREATE INDEX g2gc_parent_fk_idx ON Group2Group(parent_id); --- CREATE INDEX g2gc_child_fk_idx ON Group2Group(child_id); - -CREATE INDEX item_submitter_fk_idx ON Item(submitter_id); - -CREATE INDEX bundle_primary_fk_idx ON Bundle(primary_bitstream_id); - -CREATE INDEX item2bundle_bundle_fk_idx ON Item2Bundle(bundle_id); - -CREATE INDEX 
bundle2bits_bitstream_fk_idx ON Bundle2Bitstream(bitstream_id); - -CREATE INDEX metadatavalue_field_fk_idx ON MetadataValue(metadata_field_id); - -CREATE INDEX community_logo_fk_idx ON Community(logo_bitstream_id); - -CREATE INDEX collection_logo_fk_idx ON Collection(logo_bitstream_id); -CREATE INDEX collection_template_fk_idx ON Collection(template_item_id); -CREATE INDEX collection_workflow1_fk_idx ON Collection(workflow_step_1); -CREATE INDEX collection_workflow2_fk_idx ON Collection(workflow_step_2); -CREATE INDEX collection_workflow3_fk_idx ON Collection(workflow_step_3); -CREATE INDEX collection_submitter_fk_idx ON Collection(submitter); -CREATE INDEX collection_admin_fk_idx ON Collection(admin); - -CREATE INDEX com2com_parent_fk_idx ON Community2Community(parent_comm_id); -CREATE INDEX com2com_child_fk_idx ON Community2Community(child_comm_id); - -CREATE INDEX rp_eperson_fk_idx ON ResourcePolicy(eperson_id); -CREATE INDEX rp_epersongroup_fk_idx ON ResourcePolicy(epersongroup_id); - -CREATE INDEX epg2ep_eperson_fk_idx ON EPersonGroup2EPerson(eperson_id); - -CREATE INDEX workspace_item_fk_idx ON WorkspaceItem(item_id); -CREATE INDEX workspace_coll_fk_idx ON WorkspaceItem(collection_id); - --- CREATE INDEX workflow_item_fk_idx ON WorkflowItem(item_id); -CREATE INDEX workflow_coll_fk_idx ON WorkflowItem(collection_id); -CREATE INDEX workflow_owner_fk_idx ON WorkflowItem(owner); - -CREATE INDEX tasklist_eperson_fk_idx ON TasklistItem(eperson_id); -CREATE INDEX tasklist_workflow_fk_idx ON TasklistItem(workflow_id); - -CREATE INDEX subs_eperson_fk_idx ON Subscription(eperson_id); -CREATE INDEX subs_collection_fk_idx ON Subscription(collection_id); - -CREATE INDEX epg2wi_group_fk_idx ON epersongroup2workspaceitem(eperson_group_id); -CREATE INDEX epg2wi_workspace_fk_idx ON epersongroup2workspaceitem(workspace_item_id); - -CREATE INDEX Comm2Item_community_fk_idx ON Communities2Item( community_id ); - -CREATE INDEX mrc_result_fk_idx ON most_recent_checksum( result ); - -CREATE INDEX ch_result_fk_idx ON checksum_history( result ); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql deleted file mode 100644 index 659ca32983..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.6__Upgrade_to_DSpace_1.6_schema.sql +++ /dev/null @@ -1,93 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
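-- Context for the DS-236 change below (aside, not part of the original script):
-- the new metadatavalue.authority column holds an external authority key for a
-- value, and confidence records how reliable that match is, with -1 (the column
-- default) meaning no confidence has been recorded yet. A hand edit of a single
-- value might look roughly like:
--
--     UPDATE MetadataValue
--        SET authority = 'some-authority-key', confidence = 600
--      WHERE metadata_value_id = 42;
--
-- where 'some-authority-key' and the id are placeholders; higher confidence
-- values indicate stronger matches (DSpace's Choices support uses 600 for an
-- accepted match).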
--- http://flywaydb.org/
--- ===============================================================
-
-------------------------------------------------------------------
--- New Column for Community Admin - Delegated Admin patch (DS-228)
-------------------------------------------------------------------
-ALTER TABLE community ADD admin INTEGER REFERENCES epersongroup ( eperson_group_id );
-CREATE INDEX community_admin_fk_idx ON Community(admin);
-
--------------------------------------------------------------------------
--- DS-236 schema changes for Authority Control of Metadata Values
--------------------------------------------------------------------------
-ALTER TABLE MetadataValue
-    ADD ( authority VARCHAR(100),
-          confidence INTEGER DEFAULT -1);
-
---------------------------------------------------------------------------
--- DS-295 CC License being assigned incorrect Mime Type during submission.
---------------------------------------------------------------------------
-UPDATE bitstream SET bitstream_format_id =
-   (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'CC License')
-   WHERE name = 'license_text' AND source = 'org.dspace.license.CreativeCommons';
-
-UPDATE bitstream SET bitstream_format_id =
-   (SELECT bitstream_format_id FROM bitstreamformatregistry WHERE short_description = 'RDF XML')
-   WHERE name = 'license_rdf' AND source = 'org.dspace.license.CreativeCommons';
-
--------------------------------------------------------------------------
--- DS-260 Cleanup of Owning collection column for template item created
--- with the JSPUI after the collection creation
--------------------------------------------------------------------------
-UPDATE item SET owning_collection = null WHERE item_id IN
-       (SELECT template_item_id FROM collection WHERE template_item_id IS NOT null);
-
--- Recreate constraints with a known name and deferrable option!
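-- (Aside, not part of the original script) A DEFERRABLE foreign key can have
-- its checking postponed to commit time, which helps bulk operations that are
-- temporarily inconsistent. A minimal sketch of how such a constraint might be
-- used:
--
--     SET CONSTRAINT comm2coll_collection_fk DEFERRED;
--     -- statements that violate the FK mid-transaction
--     COMMIT;  -- the constraint is validated here
--
-- By default the constraints below are still checked immediately
-- (INITIALLY IMMEDIATE) unless a session defers them explicitly.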
--- (The previous version of these constraints is dropped by org.dspace.storage.rdbms.migration.V1_5_9__Drop_constraint_for_DSpace_1_6_schema) -ALTER TABLE community2collection ADD CONSTRAINT comm2coll_collection_fk FOREIGN KEY (collection_id) REFERENCES collection DEFERRABLE; -ALTER TABLE community2community ADD CONSTRAINT com2com_child_fk FOREIGN KEY (child_comm_id) REFERENCES community DEFERRABLE; -ALTER TABLE collection2item ADD CONSTRAINT coll2item_item_fk FOREIGN KEY (item_id) REFERENCES item DEFERRABLE; - - ------------------------------------------------------------------- --- New tables /sequences for the harvester functionality (DS-289) ------------------------------------------------------------------- -CREATE SEQUENCE harvested_collection_seq; -CREATE SEQUENCE harvested_item_seq; - -------------------------------------------------------- --- Create the harvest settings table -------------------------------------------------------- --- Values used by the OAIHarvester to harvest a collection --- HarvestInstance is the DAO class for this table - -CREATE TABLE harvested_collection -( - collection_id INTEGER REFERENCES collection(collection_id) ON DELETE CASCADE, - harvest_type INTEGER, - oai_source VARCHAR(256), - oai_set_id VARCHAR(256), - harvest_message VARCHAR2(512), - metadata_config_id VARCHAR(256), - harvest_status INTEGER, - harvest_start_time TIMESTAMP, - last_harvested TIMESTAMP, - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_collection_fk_idx ON harvested_collection(collection_id); - - -CREATE TABLE harvested_item -( - item_id INTEGER REFERENCES item(item_id) ON DELETE CASCADE, - last_harvested TIMESTAMP, - oai_id VARCHAR(64), - id INTEGER PRIMARY KEY -); - -CREATE INDEX harvested_item_fk_idx ON harvested_item(item_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql deleted file mode 100644 index f4b2737fb3..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.7__Upgrade_to_DSpace_1.7_schema.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------- --- Remove unused / obsolete sequence 'dctyperegistry_seq' (DS-729) ------------------------------------------------------------------- -DROP SEQUENCE dctyperegistry_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql deleted file mode 100644 index f96cddbe7f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V1.8__Upgrade_to_DSpace_1.8_schema.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- New column for bitstream order DS-749 -- -------------------------------------------- -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; - ---Place the sequence id's in the order -UPDATE bundle2bitstream SET bitstream_order=(SELECT sequence_id FROM bitstream WHERE bitstream.bitstream_id=bundle2bitstream.bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql deleted file mode 100644 index 472dc7dc52..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V3.0__Upgrade_to_DSpace_3.x_schema.sql +++ /dev/null @@ -1,52 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -ALTER TABLE resourcepolicy - ADD ( - rpname VARCHAR2(30), - rptype VARCHAR2(30), - rpdescription VARCHAR2(100) - ); - - -ALTER TABLE item ADD discoverable NUMBER(1); - -CREATE TABLE versionhistory -( - versionhistory_id INTEGER NOT NULL PRIMARY KEY -); - -CREATE TABLE versionitem -( - versionitem_id INTEGER NOT NULL PRIMARY KEY, - item_id INTEGER REFERENCES Item(item_id), - version_number INTEGER, - eperson_id INTEGER REFERENCES EPerson(eperson_id), - version_date TIMESTAMP, - version_summary VARCHAR2(255), - versionhistory_id INTEGER REFERENCES VersionHistory(versionhistory_id) -); - -CREATE SEQUENCE versionitem_seq; -CREATE SEQUENCE versionhistory_seq; - - -------------------------------------------- --- New columns and longer hash for salted password hashing DS-861 -- -------------------------------------------- -ALTER TABLE EPerson modify( password VARCHAR(128)); -ALTER TABLE EPerson ADD salt VARCHAR(32); -ALTER TABLE EPerson ADD digest_algorithm VARCHAR(16); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql deleted file mode 100644 index 8102376906..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.0__Upgrade_to_DSpace_4.x_schema.sql +++ /dev/null @@ -1,88 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------- --- Ensure that discoverable has a sensible default -------------------------------------------- -update item set discoverable=1 WHERE discoverable IS NULL; - -------------------------------------------- --- Add support for DOIs (table and seq.) 
-- -------------------------------------------- - -CREATE TABLE Doi -( - doi_id INTEGER PRIMARY KEY, - doi VARCHAR2(256) UNIQUE, - resource_type_id INTEGER, - resource_id INTEGER, - status INTEGER -); - -CREATE SEQUENCE doi_seq; - --- index by resource id and resource type id -CREATE INDEX doi_resource_id_type_idx ON doi(resource_id, resource_type_id); - -------------------------------------------- --- Table of running web applications for 'dspace version' -- -------------------------------------------- - -CREATE TABLE Webapp -( - webapp_id INTEGER NOT NULL PRIMARY KEY, - AppName VARCHAR2(32), - URL VARCHAR2(1000), - Started TIMESTAMP, - isUI NUMBER(1) -); - -CREATE SEQUENCE webapp_seq; - -------------------------------------------------------- --- DS-824 RequestItem table -------------------------------------------------------- - -CREATE TABLE requestitem -( - requestitem_id INTEGER NOT NULL, - token varchar(48), - item_id INTEGER, - bitstream_id INTEGER, - allfiles NUMBER(1), - request_email VARCHAR2(64), - request_name VARCHAR2(64), - request_date TIMESTAMP, - accept_request NUMBER(1), - decision_date TIMESTAMP, - expires TIMESTAMP, - CONSTRAINT requestitem_pkey PRIMARY KEY (requestitem_id), - CONSTRAINT requestitem_token_key UNIQUE (token) -); - -CREATE SEQUENCE requestitem_seq; - -------------------------------------------------------- --- DS-1655 Disable "Initial Questions" page in Submission UI by default -------------------------------------------------------- -update workspaceitem set multiple_titles=1, published_before=1, multiple_files=1; -update workflowitem set multiple_titles=1, published_before=1, multiple_files=1; - -------------------------------------------------------- --- DS-1811 Removing a collection fails if non-Solr DAO has been used before for item count -------------------------------------------------------- -delete from collection_item_count; -delete from community_item_count; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql deleted file mode 100644 index 6d75905ec9..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V4.9_2015.10.26__DS-2818_registry_update.sql +++ /dev/null @@ -1,64 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
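-- Context for the registry inserts below (aside, not part of the original
-- script): because this migration must run before the Java metadata services
-- can be used, every registry row is seeded with an idempotent
-- INSERT ... SELECT that only fires when the row is missing. The general
-- shape, with placeholder names, is:
--
--     INSERT INTO some_registry (id, name)
--       SELECT some_registry_seq.nextval, 'value' FROM dual
--       WHERE NOT EXISTS (SELECT 1 FROM some_registry WHERE name = 'value');
--
-- so running the script against a partially populated database does not
-- create duplicate rows.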
--- http://flywaydb.org/
--- ===============================================================
-
--- Special case of migration: we need to update the EPerson schema in order to get our metadata for all queries to work,
--- but we cannot use a DB connection until our database is up to date, so we need to create our registries manually in SQL
-
-INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/eperson' as namespace, 'eperson' as short_id FROM dual
-  WHERE NOT EXISTS (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry WHERE namespace = 'http://dspace.org/eperson' AND short_id = 'eperson');
-
-
--- Insert eperson.firstname
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'firstname' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'firstname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'));
-
--- Insert eperson.lastname
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'lastname' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'lastname' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'));
-
--- Insert eperson.phone
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'phone' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'phone' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'));
-
--- Insert eperson.language
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'), 'language' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'language' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='eperson'));
-
--- Insert into dc.provenance
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'provenance' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element FROM metadatafieldregistry WHERE element = 'provenance' AND qualifier IS NULL AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'));
-
--- Insert into dc.rights.license
-INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier)
-  SELECT metadatafieldregistry_seq.nextval,
-    (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc'), 'rights', 'license' FROM dual
-  WHERE NOT EXISTS
-    (SELECT metadata_field_id,element,qualifier FROM
metadatafieldregistry WHERE element = 'rights' AND qualifier='license' AND metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dc')); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql deleted file mode 100644 index c86cfe3122..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.08.08__DS-1945_Helpdesk_Request_a_Copy.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1945 RequestItem Helpdesk, store request message ------------------------------------------------------- -ALTER TABLE requestitem ADD request_message VARCHAR2(2000); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql deleted file mode 100644 index 8f0cd0d5e1..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.0_2014.09.26__DS-1582_Metadata_For_All_Objects.sql +++ /dev/null @@ -1,333 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
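-- Context for the DS-1582 migration below (aside, not part of the original
-- script): metadatavalue.item_id becomes a generic resource_id, and the new
-- resource_type_id column tells the rows apart. The constants used in this
-- script are 0 = bitstream, 1 = bundle, 2 = item, 3 = collection,
-- 4 = community, 6 = eperson group and 7 = eperson. For example, reading a
-- community's migrated title might look roughly like:
--
--     SELECT text_value FROM metadatavalue
--      WHERE resource_type_id = 4 AND resource_id = 123
--        AND metadata_field_id = (SELECT metadata_field_id
--                                   FROM metadatafieldregistry mfr, metadataschemaregistry msr
--                                  WHERE mfr.metadata_schema_id = msr.metadata_schema_id
--                                    AND msr.short_id = 'dc'
--                                    AND mfr.element = 'title' AND mfr.qualifier IS NULL);
--
-- where 123 is a placeholder community_id.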
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1582 Metadata on all DSpace Objects --- NOTE: This script also has a complimentary Flyway Java Migration --- which drops the "item_id" constraint on metadatavalue --- org.dspace.storage.rdbms.migration.V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint ------------------------------------------------------- -alter table metadatavalue rename column item_id to resource_id; - -alter table metadatavalue MODIFY(resource_id not null); -alter table metadatavalue add resource_type_id integer; -UPDATE metadatavalue SET resource_type_id = 2; -alter table metadatavalue MODIFY(resource_type_id not null); - - - --- --------- --- community --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM community where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM community where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -community_id AS resource_id, -4 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where 
short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM community where not name is null; - -alter table community drop (introductory_text, short_description, side_bar_text, copyright_text, name); - - --- ---------- --- collection --- ---------- - - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -introductory_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not introductory_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'abstract') AS metadata_field_id, -short_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not short_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier = 'tableofcontents') AS metadata_field_id, -side_bar_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not side_bar_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier is null) AS metadata_field_id, -copyright_text AS text_value, -null AS text_lang, -0 AS place -FROM collection where not copyright_text is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM collection where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where 
metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'provenance' and qualifier is null) AS metadata_field_id, -provenance_description AS text_value, -null AS text_lang, -0 AS place -FROM collection where not provenance_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -collection_id AS resource_id, -3 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'rights' and qualifier = 'license') AS metadata_field_id, -license AS text_value, -null AS text_lang, -0 AS place -FROM collection where not license is null; - -alter table collection drop (introductory_text, short_description, copyright_text, side_bar_text, name, license, provenance_description); - - --- --------- --- bundle --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bundle_id AS resource_id, -1 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bundle where not name is null; - -alter table bundle drop column name; - - - --- --------- --- bitstream --- --------- - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not name is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'description' and qualifier is null) AS metadata_field_id, -description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'format' and qualifier is null) AS metadata_field_id, -user_format_description AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not user_format_description is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT 
-metadatavalue_seq.nextval as metadata_value_id, -bitstream_id AS resource_id, -0 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'source' and qualifier is null) AS metadata_field_id, -source AS text_value, -null AS text_lang, -0 AS place -FROM bitstream where not source is null; - -alter table bitstream drop (name, description, user_format_description, source); - - --- --------- --- epersongroup --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_group_id AS resource_id, -6 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='dc') and element = 'title' and qualifier is null) AS metadata_field_id, -name AS text_value, -null AS text_lang, -0 AS place -FROM epersongroup where not name is null; - -alter table epersongroup drop column name; - - - --- --------- --- eperson --- --------- - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'firstname' and qualifier is null) AS metadata_field_id, -firstname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not firstname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'lastname' and qualifier is null) AS metadata_field_id, -lastname AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not lastname is null; - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'phone' and qualifier is null) AS metadata_field_id, -phone AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not phone is null; - - -INSERT INTO metadatavalue (metadata_value_id, resource_id, resource_type_id, metadata_field_id, text_value, text_lang, place) -SELECT -metadatavalue_seq.nextval as metadata_value_id, -eperson_id AS resource_id, -7 AS resource_type_id, -(select metadata_field_id from metadatafieldregistry where metadata_schema_id=(select metadata_schema_id from metadataschemaregistry where short_id='eperson') and element = 'language' and qualifier is null) AS metadata_field_id, -language AS text_value, -null AS text_lang, -0 AS place -FROM eperson where not language is null; - -alter table eperson drop (firstname, lastname, phone, language); - --- 
--------- --- dcvalue view --- --------- - -drop view dcvalue; - -CREATE VIEW dcvalue AS - SELECT MetadataValue.metadata_value_id AS "dc_value_id", MetadataValue.resource_id, - MetadataValue.metadata_field_id AS "dc_type_id", MetadataValue.text_value, - MetadataValue.text_lang, MetadataValue.place - FROM MetadataValue, MetadataFieldRegistry - WHERE MetadataValue.metadata_field_id = MetadataFieldRegistry.metadata_field_id - AND MetadataFieldRegistry.metadata_schema_id = 1 AND MetadataValue.resource_type_id = 2; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql deleted file mode 100644 index 2e09b807de..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.6_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 0 and resource_id in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and resource_type_id = 1 and resource_id in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.item_id - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql deleted file mode 100644 index 9f9836faf4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V5.7_2017.04.11__DS-3563_Index_metadatavalue_resource_type_id_column.sql +++ /dev/null @@ -1,23 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3563 Missing database index on metadatavalue.resource_type_id ------------------------------------------------------- --- Create an index on the metadata value resource_type_id column so that it can be searched efficiently. 
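-- (Aside, not part of the original script) The PL/SQL block below makes the
-- migration idempotent: it maps Oracle error -1418 ("specified index does not
-- exist") to a named exception, attempts the DROP INDEX, and silently ignores
-- the error if the index was never created, before recreating it. An
-- equivalent check against the data dictionary would be, for example:
--
--     SELECT COUNT(*) FROM user_indexes
--      WHERE index_name = 'METADATAVALUE_TYPE_ID_IDX';
--
-- issuing the DROP only when the count is 1.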
-declare - index_not_exists EXCEPTION; - PRAGMA EXCEPTION_INIT(index_not_exists, -1418); -begin - - execute immediate 'DROP INDEX metadatavalue_type_id_idx'; - exception - when index_not_exists then null; -end; -/ -CREATE INDEX metadatavalue_type_id_idx ON metadatavalue (resource_type_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql deleted file mode 100644 index dd857e763d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015.03.07__DS-2701_Hibernate_migration.sql +++ /dev/null @@ -1,469 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -DROP VIEW community2item; - -CREATE TABLE dspaceobject -( - uuid RAW(16) NOT NULL PRIMARY KEY -); - -CREATE TABLE site -( - uuid RAW(16) NOT NULL PRIMARY KEY REFERENCES dspaceobject(uuid) -); - -ALTER TABLE eperson ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM eperson; -ALTER TABLE eperson ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE eperson MODIFY uuid NOT NULL; -ALTER TABLE eperson ADD CONSTRAINT eperson_id_unique PRIMARY KEY (uuid); -UPDATE eperson SET require_certificate = '0' WHERE require_certificate IS NULL; -UPDATE eperson SET self_registered = '0' WHERE self_registered IS NULL; - - - -UPDATE metadatavalue SET text_value='Administrator' - WHERE resource_type_id=6 AND resource_id=1; -UPDATE metadatavalue SET text_value='Anonymous' - WHERE resource_type_id=6 AND resource_id=0; - -ALTER TABLE epersongroup ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM epersongroup; -ALTER TABLE epersongroup ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE epersongroup MODIFY uuid NOT NULL; -ALTER TABLE epersongroup ADD CONSTRAINT epersongroup_id_unique PRIMARY KEY (uuid); - -ALTER TABLE item ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM item; -ALTER TABLE item ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE item MODIFY uuid NOT NULL; -ALTER TABLE item ADD CONSTRAINT item_id_unique PRIMARY KEY (uuid); - -ALTER TABLE community ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM community; -ALTER TABLE community ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE community MODIFY uuid NOT NULL; -ALTER TABLE community ADD CONSTRAINT community_id_unique PRIMARY KEY (uuid); - - -ALTER TABLE collection ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM collection; -ALTER TABLE collection ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE collection MODIFY uuid NOT NULL; -ALTER TABLE collection ADD CONSTRAINT collection_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bundle ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bundle; -ALTER TABLE bundle ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bundle MODIFY uuid NOT NULL; -ALTER TABLE bundle ADD 
CONSTRAINT bundle_id_unique PRIMARY KEY (uuid); - -ALTER TABLE bitstream ADD uuid RAW(16) DEFAULT SYS_GUID(); -INSERT INTO dspaceobject (uuid) SELECT uuid FROM bitstream; -ALTER TABLE bitstream ADD FOREIGN KEY (uuid) REFERENCES dspaceobject; -ALTER TABLE bitstream MODIFY uuid NOT NULL; -ALTER TABLE bitstream ADD CONSTRAINT bitstream_id_unique PRIMARY KEY (uuid); -UPDATE bitstream SET sequence_id = -1 WHERE sequence_id IS NULL; -UPDATE bitstream SET size_bytes = -1 WHERE size_bytes IS NULL; -UPDATE bitstream SET deleted = '0' WHERE deleted IS NULL; -UPDATE bitstream SET store_number = -1 WHERE store_number IS NULL; - --- Migrate EPersonGroup2EPerson table -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE EPersonGroup2EPerson ADD eperson_group_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE EPersonGroup2EPerson ADD eperson_id RAW(16) REFERENCES Eperson(uuid); -CREATE INDEX EpersonGroup2Eperson_group on EpersonGroup2Eperson(eperson_group_id); -CREATE INDEX EpersonGroup2Eperson_person on EpersonGroup2Eperson(eperson_id); -UPDATE EPersonGroup2EPerson SET eperson_group_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE EPersonGroup2EPerson.eperson_group_legacy_id = EPersonGroup.eperson_group_id); -UPDATE EPersonGroup2EPerson SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE EPersonGroup2EPerson.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_group_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson MODIFY eperson_id NOT NULL; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_group_legacy_id; -ALTER TABLE EPersonGroup2EPerson DROP COLUMN eperson_legacy_id; -ALTER TABLE epersongroup2eperson DROP COLUMN id; -ALTER TABLE EPersonGroup2EPerson add CONSTRAINT EPersonGroup2EPerson_unique primary key (eperson_group_id,eperson_id); - --- Migrate GROUP2GROUP table -ALTER TABLE Group2Group RENAME COLUMN parent_id to parent_legacy_id; -ALTER TABLE Group2Group RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2Group ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2Group ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2Group_parent on Group2Group(parent_id); -CREATE INDEX Group2Group_child on Group2Group(child_id); -UPDATE Group2Group SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2Group.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2Group SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2Group.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2Group MODIFY parent_id NOT NULL; -ALTER TABLE Group2Group MODIFY child_id NOT NULL; -ALTER TABLE Group2Group DROP COLUMN parent_legacy_id; -ALTER TABLE Group2Group DROP COLUMN child_legacy_id; -ALTER TABLE Group2Group DROP COLUMN id; -ALTER TABLE Group2Group add CONSTRAINT Group2Group_unique primary key (parent_id,child_id); - --- Migrate collection2item -ALTER TABLE Collection2Item RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Collection2Item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE Collection2Item ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE Collection2Item ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX Collecion2Item_collection on Collection2Item(collection_id); -CREATE INDEX Collecion2Item_item on Collection2Item(item_id); -UPDATE Collection2Item SET collection_id = (SELECT 
Collection.uuid FROM Collection WHERE Collection2Item.collection_legacy_id = Collection.collection_id); -UPDATE Collection2Item SET item_id = (SELECT Item.uuid FROM Item WHERE Collection2Item.item_legacy_id = Item.item_id); -ALTER TABLE Collection2Item MODIFY collection_id NOT NULL; -ALTER TABLE Collection2Item MODIFY item_id NOT NULL; -ALTER TABLE Collection2Item DROP COLUMN collection_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN item_legacy_id; -ALTER TABLE Collection2Item DROP COLUMN id; --- Magic query that will delete all duplicate collection item_id references from the database (if we don't do this the primary key creation will fail) -DELETE FROM collection2item WHERE rowid NOT IN (SELECT MIN(rowid) FROM collection2item GROUP BY collection_id,item_id); -ALTER TABLE Collection2Item add CONSTRAINT collection2item_unique primary key (collection_id,item_id); - --- Migrate Community2Community -ALTER TABLE Community2Community RENAME COLUMN parent_comm_id to parent_legacy_id; -ALTER TABLE Community2Community RENAME COLUMN child_comm_id to child_legacy_id; -ALTER TABLE Community2Community ADD parent_comm_id RAW(16) REFERENCES Community(uuid); -ALTER TABLE Community2Community ADD child_comm_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX Community2Community_parent on Community2Community(parent_comm_id); -CREATE INDEX Community2Community_child on Community2Community(child_comm_id); -UPDATE Community2Community SET parent_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.parent_legacy_id = Community.community_id); -UPDATE Community2Community SET child_comm_id = (SELECT Community.uuid FROM Community WHERE Community2Community.child_legacy_id = Community.community_id); -ALTER TABLE Community2Community MODIFY parent_comm_id NOT NULL; -ALTER TABLE Community2Community MODIFY child_comm_id NOT NULL; -ALTER TABLE Community2Community DROP COLUMN parent_legacy_id; -ALTER TABLE Community2Community DROP COLUMN child_legacy_id; -ALTER TABLE Community2Community DROP COLUMN id; -ALTER TABLE Community2Community add CONSTRAINT Community2Community_unique primary key (parent_comm_id,child_comm_id); - --- Migrate community2collection -ALTER TABLE community2collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE community2collection RENAME COLUMN community_id to community_legacy_id; -ALTER TABLE community2collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -ALTER TABLE community2collection ADD community_id RAW(16) REFERENCES Community(uuid); -CREATE INDEX community2collection_collectio on community2collection(collection_id); -CREATE INDEX community2collection_community on community2collection(community_id); -UPDATE community2collection SET collection_id = (SELECT Collection.uuid FROM Collection WHERE community2collection.collection_legacy_id = Collection.collection_id); -UPDATE community2collection SET community_id = (SELECT Community.uuid FROM Community WHERE community2collection.community_legacy_id = Community.community_id); -ALTER TABLE community2collection MODIFY collection_id NOT NULL; -ALTER TABLE community2collection MODIFY community_id NOT NULL; -ALTER TABLE community2collection DROP COLUMN collection_legacy_id; -ALTER TABLE community2collection DROP COLUMN community_legacy_id; -ALTER TABLE community2collection DROP COLUMN id; -ALTER TABLE community2collection add CONSTRAINT community2collection_unique primary key (collection_id,community_id); - - --- Migrate Group2GroupCache table -ALTER TABLE Group2GroupCache RENAME COLUMN parent_id to 
parent_legacy_id; -ALTER TABLE Group2GroupCache RENAME COLUMN child_id to child_legacy_id; -ALTER TABLE Group2GroupCache ADD parent_id RAW(16) REFERENCES EpersonGroup(uuid); -ALTER TABLE Group2GroupCache ADD child_id RAW(16) REFERENCES EpersonGroup(uuid); -CREATE INDEX Group2GroupCache_parent on Group2GroupCache(parent_id); -CREATE INDEX Group2GroupCache_child on Group2GroupCache(child_id); -UPDATE Group2GroupCache SET parent_id = (SELECT EPersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.parent_legacy_id = EPersonGroup.eperson_group_id); -UPDATE Group2GroupCache SET child_id = (SELECT EpersonGroup.uuid FROM EpersonGroup WHERE Group2GroupCache.child_legacy_id = EpersonGroup.eperson_group_id); -ALTER TABLE Group2GroupCache MODIFY parent_id NOT NULL; -ALTER TABLE Group2GroupCache MODIFY child_id NOT NULL; -ALTER TABLE Group2GroupCache DROP COLUMN parent_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN child_legacy_id; -ALTER TABLE Group2GroupCache DROP COLUMN id; -ALTER TABLE Group2GroupCache add CONSTRAINT Group2GroupCache_unique primary key (parent_id,child_id); - --- Migrate Item2Bundle -ALTER TABLE item2bundle RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE item2bundle RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE item2bundle ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE item2bundle ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX item2bundle_bundle on item2bundle(bundle_id); -CREATE INDEX item2bundle_item on item2bundle(item_id); -UPDATE item2bundle SET bundle_id = (SELECT Bundle.uuid FROM Bundle WHERE item2bundle.bundle_legacy_id = Bundle.bundle_id); -UPDATE item2bundle SET item_id = (SELECT Item.uuid FROM Item WHERE item2bundle.item_legacy_id = Item.item_id); -ALTER TABLE item2bundle MODIFY bundle_id NOT NULL; -ALTER TABLE item2bundle MODIFY item_id NOT NULL; -ALTER TABLE item2bundle DROP COLUMN bundle_legacy_id; -ALTER TABLE item2bundle DROP COLUMN item_legacy_id; -ALTER TABLE item2bundle DROP COLUMN id; -ALTER TABLE item2bundle add CONSTRAINT item2bundle_unique primary key (bundle_id,item_id); - ---Migrate Bundle2Bitsteam -ALTER TABLE bundle2bitstream RENAME COLUMN bundle_id to bundle_legacy_id; -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE bundle2bitstream ADD bundle_id RAW(16) REFERENCES Bundle(uuid); -ALTER TABLE bundle2bitstream ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle2bitstream_bundle on bundle2bitstream(bundle_id); -CREATE INDEX bundle2bitstream_bitstream on bundle2bitstream(bitstream_id); -UPDATE bundle2bitstream SET bundle_id = (SELECT bundle.uuid FROM bundle WHERE bundle2bitstream.bundle_legacy_id = bundle.bundle_id); -UPDATE bundle2bitstream SET bitstream_id = (SELECT bitstream.uuid FROM bitstream WHERE bundle2bitstream.bitstream_legacy_id = bitstream.bitstream_id); -ALTER TABLE bundle2bitstream RENAME COLUMN bitstream_order to bitstream_order_legacy; -ALTER TABLE bundle2bitstream ADD bitstream_order INTEGER; -MERGE INTO bundle2bitstream dst -USING ( SELECT ROWID AS r_id - , ROW_NUMBER () OVER ( PARTITION BY bundle_id - ORDER BY bitstream_order_legacy, bitstream_id - ) AS new_order - FROM bundle2bitstream - ) src -ON (dst.ROWID = src.r_id) -WHEN MATCHED THEN UPDATE -SET dst.bitstream_order = (src.new_order-1) -; -ALTER TABLE bundle2bitstream MODIFY bundle_id NOT NULL; -ALTER TABLE bundle2bitstream MODIFY bitstream_id NOT NULL; -ALTER TABLE bundle2bitstream DROP COLUMN bundle_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN 
bitstream_legacy_id; -ALTER TABLE bundle2bitstream DROP COLUMN id; -ALTER TABLE bundle2bitstream add CONSTRAINT bundle2bitstream_unique primary key (bitstream_id,bundle_id,bitstream_order); - - --- Migrate item -ALTER TABLE item RENAME COLUMN submitter_id to submitter_id_legacy_id; -ALTER TABLE item ADD submitter_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX item_submitter on item(submitter_id); -UPDATE item SET submitter_id = (SELECT eperson.uuid FROM eperson WHERE item.submitter_id_legacy_id = eperson.eperson_id); -ALTER TABLE item DROP COLUMN submitter_id_legacy_id; - -ALTER TABLE item RENAME COLUMN owning_collection to owning_collection_legacy; -ALTER TABLE item ADD owning_collection RAW(16) REFERENCES Collection(uuid); -CREATE INDEX item_collection on item(owning_collection); -UPDATE item SET owning_collection = (SELECT Collection.uuid FROM Collection WHERE item.owning_collection_legacy = collection.collection_id); -ALTER TABLE item DROP COLUMN owning_collection_legacy; - -UPDATE item SET in_archive = '0' WHERE in_archive IS NULL; -UPDATE item SET discoverable = '0' WHERE discoverable IS NULL; -UPDATE item SET withdrawn = '0' WHERE withdrawn IS NULL; - --- Migrate bundle -ALTER TABLE bundle RENAME COLUMN primary_bitstream_id to primary_bitstream_legacy_id; -ALTER TABLE bundle ADD primary_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX bundle_primary on bundle(primary_bitstream_id); -UPDATE bundle SET primary_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE bundle.primary_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE bundle DROP COLUMN primary_bitstream_legacy_id; - - --- Migrate community references -ALTER TABLE Community RENAME COLUMN admin to admin_legacy; -ALTER TABLE Community ADD admin RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX Community_admin on Community(admin); -UPDATE Community SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Community.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Community DROP COLUMN admin_legacy; - -ALTER TABLE Community RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Community ADD logo_bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX Community_bitstream on Community(logo_bitstream_id); -UPDATE Community SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Community.logo_bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE Community DROP COLUMN logo_bitstream_legacy_id; - - ---Migrate Collection references -ALTER TABLE Collection RENAME COLUMN workflow_step_1 to workflow_step_1_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_2 to workflow_step_2_legacy; -ALTER TABLE Collection RENAME COLUMN workflow_step_3 to workflow_step_3_legacy; -ALTER TABLE Collection RENAME COLUMN submitter to submitter_legacy; -ALTER TABLE Collection RENAME COLUMN template_item_id to template_item_legacy_id; -ALTER TABLE Collection RENAME COLUMN logo_bitstream_id to logo_bitstream_legacy_id; -ALTER TABLE Collection RENAME COLUMN admin to admin_legacy; -ALTER TABLE Collection ADD workflow_step_1 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_2 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD workflow_step_3 RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD submitter RAW(16) REFERENCES EPersonGroup(uuid); -ALTER TABLE Collection ADD template_item_id RAW(16); -ALTER TABLE Collection ADD logo_bitstream_id RAW(16); -ALTER TABLE Collection ADD admin RAW(16) REFERENCES 
EPersonGroup(uuid); -CREATE INDEX Collection_workflow1 on Collection(workflow_step_1); -CREATE INDEX Collection_workflow2 on Collection(workflow_step_2); -CREATE INDEX Collection_workflow3 on Collection(workflow_step_3); -CREATE INDEX Collection_submitter on Collection(submitter); -CREATE INDEX Collection_template on Collection(template_item_id); -CREATE INDEX Collection_bitstream on Collection(logo_bitstream_id); -UPDATE Collection SET workflow_step_1 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_1_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_2 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_2_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET workflow_step_3 = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.workflow_step_3_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET submitter = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.submitter_legacy = EPersonGroup.eperson_group_id); -UPDATE Collection SET template_item_id = (SELECT Item.uuid FROM Item WHERE Collection.template_item_legacy_id = Item.item_id); -UPDATE Collection SET logo_bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE Collection.logo_bitstream_legacy_id = Bitstream.bitstream_id); -UPDATE Collection SET admin = (SELECT EPersonGroup.uuid FROM EPersonGroup WHERE Collection.admin_legacy = EPersonGroup.eperson_group_id); -ALTER TABLE Collection DROP COLUMN workflow_step_1_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_2_legacy; -ALTER TABLE Collection DROP COLUMN workflow_step_3_legacy; -ALTER TABLE Collection DROP COLUMN submitter_legacy; -ALTER TABLE Collection DROP COLUMN template_item_legacy_id; -ALTER TABLE Collection DROP COLUMN logo_bitstream_legacy_id; -ALTER TABLE Collection DROP COLUMN admin_legacy; - - --- Migrate resource policy references -ALTER TABLE ResourcePolicy RENAME COLUMN eperson_id to eperson_id_legacy_id; -ALTER TABLE ResourcePolicy ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX resourcepolicy_person on resourcepolicy(eperson_id); -UPDATE ResourcePolicy SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.eperson_id_legacy_id = eperson.eperson_id); -ALTER TABLE ResourcePolicy DROP COLUMN eperson_id_legacy_id; - -ALTER TABLE ResourcePolicy RENAME COLUMN epersongroup_id to epersongroup_id_legacy_id; -ALTER TABLE ResourcePolicy ADD epersongroup_id RAW(16) REFERENCES EPersonGroup(uuid); -CREATE INDEX resourcepolicy_group on resourcepolicy(epersongroup_id); -UPDATE ResourcePolicy SET epersongroup_id = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.epersongroup_id_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE ResourcePolicy DROP COLUMN epersongroup_id_legacy_id; - -ALTER TABLE ResourcePolicy ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -UPDATE ResourcePolicy SET dspace_object = (SELECT eperson.uuid FROM eperson WHERE ResourcePolicy.resource_id = eperson.eperson_id AND ResourcePolicy.resource_type_id = 7) WHERE ResourcePolicy.resource_type_id = 7; -UPDATE ResourcePolicy SET dspace_object = (SELECT epersongroup.uuid FROM epersongroup WHERE ResourcePolicy.resource_id = epersongroup.eperson_group_id AND ResourcePolicy.resource_type_id = 6) WHERE ResourcePolicy.resource_type_id = 6; -UPDATE ResourcePolicy SET dspace_object = (SELECT community.uuid FROM community WHERE ResourcePolicy.resource_id = 
community.community_id AND ResourcePolicy.resource_type_id = 4) WHERE ResourcePolicy.resource_type_id = 4; -UPDATE ResourcePolicy SET dspace_object = (SELECT collection.uuid FROM collection WHERE ResourcePolicy.resource_id = collection.collection_id AND ResourcePolicy.resource_type_id = 3) WHERE ResourcePolicy.resource_type_id = 3; -UPDATE ResourcePolicy SET dspace_object = (SELECT item.uuid FROM item WHERE ResourcePolicy.resource_id = item.item_id AND ResourcePolicy.resource_type_id = 2) WHERE ResourcePolicy.resource_type_id = 2; -UPDATE ResourcePolicy SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE ResourcePolicy.resource_id = bundle.bundle_id AND ResourcePolicy.resource_type_id = 1) WHERE ResourcePolicy.resource_type_id = 1; -UPDATE ResourcePolicy SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE ResourcePolicy.resource_id = bitstream.bitstream_id AND ResourcePolicy.resource_type_id = 0) WHERE ResourcePolicy.resource_type_id = 0; -UPDATE resourcepolicy SET resource_type_id = -1 WHERE resource_type_id IS NULL; -UPDATE resourcepolicy SET action_id = -1 WHERE action_id IS NULL; - - --- Migrate Subscription -ALTER TABLE Subscription RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE Subscription ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX Subscription_person on Subscription(eperson_id); -UPDATE Subscription SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE Subscription.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE Subscription DROP COLUMN eperson_legacy_id; - -ALTER TABLE Subscription RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE Subscription ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX Subscription_collection on Subscription(collection_id); -UPDATE Subscription SET collection_id = (SELECT collection.uuid FROM collection WHERE Subscription.collection_legacy_id = collection.collection_id); -ALTER TABLE Subscription DROP COLUMN collection_legacy_id; - - --- Migrate versionitem -ALTER TABLE versionitem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE versionitem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -CREATE INDEX versionitem_person on versionitem(eperson_id); -UPDATE versionitem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE versionitem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE versionitem DROP COLUMN eperson_legacy_id; - -ALTER TABLE versionitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE versionitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX versionitem_item on versionitem(item_id); -UPDATE versionitem SET item_id = (SELECT item.uuid FROM item WHERE versionitem.item_legacy_id = item.item_id); -ALTER TABLE versionitem DROP COLUMN item_legacy_id; -UPDATE versionitem SET version_number = -1 WHERE version_number IS NULL; - --- Migrate handle table -ALTER TABLE handle RENAME COLUMN resource_id to resource_legacy_id; -ALTER TABLE handle ADD resource_id RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX handle_object on handle(resource_id); -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4); -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3); -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2); - --- Migrate 
metadata value table -DROP VIEW dcvalue; - -ALTER TABLE metadatavalue ADD dspace_object_id RAW(16) REFERENCES dspaceobject(uuid); --- CREATE INDEX metadatavalue_field on metadatavalue(metadata_field_id); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); -UPDATE metadatavalue SET dspace_object_id = (SELECT eperson.uuid FROM eperson WHERE metadatavalue.resource_id = eperson.eperson_id AND metadatavalue.resource_type_id = 7) WHERE metadatavalue.resource_type_id= 7; -UPDATE metadatavalue SET dspace_object_id = (SELECT epersongroup.uuid FROM epersongroup WHERE metadatavalue.resource_id = epersongroup.eperson_group_id AND metadatavalue.resource_type_id = 6) WHERE metadatavalue.resource_type_id= 6; -UPDATE metadatavalue SET dspace_object_id = (SELECT community.uuid FROM community WHERE metadatavalue.resource_id = community.community_id AND metadatavalue.resource_type_id = 4) WHERE metadatavalue.resource_type_id= 4; -UPDATE metadatavalue SET dspace_object_id = (SELECT collection.uuid FROM collection WHERE metadatavalue.resource_id = collection.collection_id AND metadatavalue.resource_type_id = 3) WHERE metadatavalue.resource_type_id= 3; -UPDATE metadatavalue SET dspace_object_id = (SELECT item.uuid FROM item WHERE metadatavalue.resource_id = item.item_id AND metadatavalue.resource_type_id = 2) WHERE metadatavalue.resource_type_id= 2; -UPDATE metadatavalue SET dspace_object_id = (SELECT bundle.uuid FROM bundle WHERE metadatavalue.resource_id = bundle.bundle_id AND metadatavalue.resource_type_id = 1) WHERE metadatavalue.resource_type_id= 1; -UPDATE metadatavalue SET dspace_object_id = (SELECT bitstream.uuid FROM bitstream WHERE metadatavalue.resource_id = bitstream.bitstream_id AND metadatavalue.resource_type_id = 0) WHERE metadatavalue.resource_type_id= 0; -DROP INDEX metadatavalue_item_idx; -DROP INDEX metadatavalue_item_idx2; -ALTER TABLE metadatavalue DROP COLUMN resource_id; -ALTER TABLE metadatavalue DROP COLUMN resource_type_id; -UPDATE MetadataValue SET confidence = -1 WHERE confidence IS NULL; -UPDATE metadatavalue SET place = -1 WHERE place IS NULL; - --- Alter harvested item -ALTER TABLE harvested_item RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE harvested_item ADD item_id RAW(16) REFERENCES item(uuid); -CREATE INDEX harvested_item_item on harvested_item(item_id); -UPDATE harvested_item SET item_id = (SELECT item.uuid FROM item WHERE harvested_item.item_legacy_id = item.item_id); -ALTER TABLE harvested_item DROP COLUMN item_legacy_id; - --- Alter harvested collection -ALTER TABLE harvested_collection RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE harvested_collection ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX harvested_collection_collectio on harvested_collection(collection_id); -UPDATE harvested_collection SET collection_id = (SELECT collection.uuid FROM collection WHERE harvested_collection.collection_legacy_id = collection.collection_id); -ALTER TABLE harvested_collection DROP COLUMN collection_legacy_id; - -UPDATE harvested_collection SET harvest_type = -1 WHERE harvest_type IS NULL; -UPDATE harvested_collection SET harvest_status = -1 WHERE harvest_status IS NULL; - - ---Alter workspaceitem -ALTER TABLE workspaceitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workspaceitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX workspaceitem_item on workspaceitem(item_id); -UPDATE workspaceitem SET 
item_id = (SELECT item.uuid FROM item WHERE workspaceitem.item_legacy_id = item.item_id); -ALTER TABLE workspaceitem DROP COLUMN item_legacy_id; - -ALTER TABLE workspaceitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workspaceitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -CREATE INDEX workspaceitem_coll on workspaceitem(collection_id); -UPDATE workspaceitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workspaceitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workspaceitem DROP COLUMN collection_legacy_id; - -UPDATE workspaceitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workspaceitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workspaceitem SET multiple_files = '0' WHERE multiple_files IS NULL; -UPDATE workspaceitem SET stage_reached = -1 WHERE stage_reached IS NULL; -UPDATE workspaceitem SET page_reached = -1 WHERE page_reached IS NULL; - ---Alter epersongroup2workspaceitem -ALTER TABLE epersongroup2workspaceitem RENAME COLUMN eperson_group_id to eperson_group_legacy_id; -ALTER TABLE epersongroup2workspaceitem ADD eperson_group_id RAW(16) REFERENCES epersongroup(uuid); -CREATE INDEX epersongroup2workspaceitem_gro on epersongroup2workspaceitem(eperson_group_id); -UPDATE epersongroup2workspaceitem SET eperson_group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE epersongroup2workspaceitem.eperson_group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE epersongroup2workspaceitem DROP COLUMN eperson_group_legacy_id; - -ALTER TABLE epersongroup2workspaceitem DROP COLUMN id; -ALTER TABLE epersongroup2workspaceitem MODIFY workspace_item_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem MODIFY eperson_group_id NOT NULL; -ALTER TABLE epersongroup2workspaceitem add CONSTRAINT epersongroup2wsitem_unqiue primary key (workspace_item_id,eperson_group_id); - ---Alter most_recent_checksum -ALTER TABLE most_recent_checksum RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE most_recent_checksum ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX most_recent_checksum_bitstream on most_recent_checksum(bitstream_id); -UPDATE most_recent_checksum SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE most_recent_checksum.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE most_recent_checksum DROP COLUMN bitstream_legacy_id; - -UPDATE most_recent_checksum SET to_be_processed = '0' WHERE to_be_processed IS NULL; -UPDATE most_recent_checksum SET matched_prev_checksum = '0' WHERE matched_prev_checksum IS NULL; - ---Alter checksum_history -ALTER TABLE checksum_history RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE checksum_history ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX checksum_history_bitstream on checksum_history(bitstream_id); -UPDATE checksum_history SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE checksum_history.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE checksum_history DROP COLUMN bitstream_legacy_id; - -RENAME checksum_history_seq TO checksum_history_check_id_seq; - ---Alter table doi -ALTER TABLE doi ADD dspace_object RAW(16) REFERENCES dspaceobject(uuid); -CREATE INDEX doi_object on doi(dspace_object); -UPDATE doi SET dspace_object = (SELECT community.uuid FROM community WHERE doi.resource_id = community.community_id AND doi.resource_type_id = 4) WHERE doi.resource_type_id = 4; -UPDATE doi SET dspace_object = (SELECT collection.uuid FROM 
collection WHERE doi.resource_id = collection.collection_id AND doi.resource_type_id = 3) WHERE doi.resource_type_id = 3; -UPDATE doi SET dspace_object = (SELECT item.uuid FROM item WHERE doi.resource_id = item.item_id AND doi.resource_type_id = 2) WHERE doi.resource_type_id = 2; -UPDATE doi SET dspace_object = (SELECT bundle.uuid FROM bundle WHERE doi.resource_id = bundle.bundle_id AND doi.resource_type_id = 1) WHERE doi.resource_type_id = 1; -UPDATE doi SET dspace_object = (SELECT bitstream.uuid FROM bitstream WHERE doi.resource_id = bitstream.bitstream_id AND doi.resource_type_id = 0) WHERE doi.resource_type_id = 0; - ---Update table bitstreamformatregistry -UPDATE bitstreamformatregistry SET support_level = -1 WHERE support_level IS NULL; - ---Update table requestitem -UPDATE requestitem SET allfiles = '0' WHERE allfiles IS NULL; -UPDATE requestitem SET accept_request = '0' WHERE accept_request IS NULL; - ---Update table webapp -UPDATE webapp SET isui = -1 WHERE isui IS NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql deleted file mode 100644 index 8f1a7ad157..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2015_03_06_01__DS_3378_lost_oracle_indexes.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS_3378 Lost oracle indexes ------------------------------------------------------- -CREATE UNIQUE INDEX eperson_eperson on eperson(eperson_id); -CREATE UNIQUE INDEX epersongroup_eperson_group on epersongroup(eperson_group_id); -CREATE UNIQUE INDEX community_community on community(community_id); -CREATE UNIQUE INDEX collection_collection on collection(collection_id); -CREATE UNIQUE INDEX item_item on item(item_id); -CREATE UNIQUE INDEX bundle_bundle on bundle(bundle_id); -CREATE UNIQUE INDEX bitstream_bitstream on bitstream(bitstream_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql deleted file mode 100644 index 8ad6f7fcd2..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.01.03__DS-3024.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3024 Invent "permanent" groups ------------------------------------------------------- - -ALTER TABLE epersongroup - ADD (permanent NUMBER(1) DEFAULT 0); -UPDATE epersongroup SET permanent = 1 - WHERE uuid IN ( - SELECT dspace_object_id - FROM metadataschemaregistry s - JOIN metadatafieldregistry f USING (metadata_schema_id) - JOIN metadatavalue v USING (metadata_field_id) - WHERE s.short_id = 'dc' - AND f.element = 'title' - AND f.qualifier IS NULL - AND dbms_lob.compare(v.text_value, 'Administrator') = 0 OR 
dbms_lob.compare(v.text_value,'Anonymous') = 0 - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql deleted file mode 100644 index 18cb4a5084..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.02.25__DS-3004-slow-searching-as-admin.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3024 extremely slow searching when logged in as admin ---------------------------------------------------------------- --- This script will put the group name on the epersongroup --- record itself for performance reasons. It will also make --- sure that a group name is unique (so that for example no two --- Administrator groups can be created). ---------------------------------------------------------------- - -ALTER TABLE epersongroup -ADD name VARCHAR2(250); - -CREATE UNIQUE INDEX epersongroup_unique_idx_name on epersongroup(name); - -UPDATE epersongroup -SET name = -(SELECT text_value - FROM metadatavalue v - JOIN metadatafieldregistry field on v.metadata_field_id = field.metadata_field_id - JOIN metadataschemaregistry s ON field.metadata_schema_id = s.metadata_schema_id - WHERE s.short_id = 'dc' AND element = 'title' AND qualifier IS NULL - AND v.dspace_object_id = epersongroup.uuid); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql deleted file mode 100644 index e0a103749c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.01__DS-1955_Increase_embargo_reason.sql +++ /dev/null @@ -1,25 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------- --- DS-1955 resize rpdescription for embargo reason ------------------------------------------------------- - --- We cannot alter type between varchar2 & clob directly so an in between column is required -ALTER TABLE resourcepolicy ADD rpdescription_clob CLOB; -UPDATE resourcepolicy SET rpdescription_clob=rpdescription, rpdescription=null; -ALTER TABLE resourcepolicy DROP COLUMN rpdescription; -ALTER TABLE resourcepolicy RENAME COLUMN rpdescription_clob TO rpdescription; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql deleted file mode 100644 index 7b13d10b6d..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.04__DS-3086-OAI-Performance-fix.sql +++ /dev/null @@ -1,46 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3086 OAI Harvesting performance ---------------------------------------------------------------- --- This script will create indexes on the key fields of the --- metadataschemaregistry and metadatafieldregistry tables to --- increase the performance of the queries. It will also add --- "ON DELETE CASCADE" to improve the performance of Item deletion. 
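In outline, the DS-3086 cascade change rebuilds the foreign-key column and re-adds the constraint with ON DELETE CASCADE, so that deleting a dspaceobject row also removes its dependent rows. Sketched here for the resourcepolicy table; the script applies the same steps to metadatavalue:

-- Recreate the column; dropping the old column drops the old, non-cascading constraint with it
ALTER TABLE resourcepolicy ADD dspace_object_new RAW(16);
UPDATE resourcepolicy SET dspace_object_new = dspace_object;
ALTER TABLE resourcepolicy DROP COLUMN dspace_object;
ALTER TABLE resourcepolicy RENAME COLUMN dspace_object_new TO dspace_object;

-- Re-add the foreign key with ON DELETE CASCADE
ALTER TABLE resourcepolicy
  ADD CONSTRAINT resourcepolicy_dspace_obj_fk
  FOREIGN KEY (dspace_object)
  REFERENCES dspaceobject(uuid)
  ON DELETE CASCADE;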
---------------------------------------------------------------- - -CREATE UNIQUE INDEX metadataschema_idx_short_id on metadataschemaregistry(short_id); - -CREATE INDEX metadatafield_idx_elem_qual on metadatafieldregistry(element, qualifier); - -CREATE INDEX resourcepolicy_idx_rptype on resourcepolicy(rptype); - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE RESOURCEPOLICY ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE RESOURCEPOLICY SET DSPACE_OBJECT_NEW = DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY DROP COLUMN DSPACE_OBJECT; -ALTER TABLE RESOURCEPOLICY RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT; - -ALTER TABLE RESOURCEPOLICY -ADD CONSTRAINT RESOURCEPOLICY_DSPACE_OBJ_FK -FOREIGN KEY (DSPACE_OBJECT) -REFERENCES dspaceobject(uuid) -ON DELETE CASCADE; - --- Add "ON DELETE CASCADE" to foreign key constraint to Item -ALTER TABLE METADATAVALUE ADD DSPACE_OBJECT_NEW RAW(16); -UPDATE METADATAVALUE SET DSPACE_OBJECT_NEW = DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE DROP COLUMN DSPACE_OBJECT_ID; -ALTER TABLE METADATAVALUE RENAME COLUMN DSPACE_OBJECT_NEW to DSPACE_OBJECT_ID; - -ALTER TABLE METADATAVALUE -ADD CONSTRAINT METADATAVALUE_DSPACE_OBJECT_FK -FOREIGN KEY (DSPACE_OBJECT_ID) -REFERENCES DSPACEOBJECT(UUID) -ON DELETE CASCADE; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql deleted file mode 100644 index a1b303f036..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.04.14__DS-3125-fix-bundle-bitstream-delete-rights.sql +++ /dev/null @@ -1,33 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3125 Submitters cannot delete bistreams of workspaceitems ---------------------------------------------------------------- --- This script will add delete rights on all bundles/bitstreams --- for people who already have REMOVE rights. --- In previous versions REMOVE rights was enough to ensure that --- you could delete an object. 
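Spelled out, the DS-3125 fix is a single INSERT ... SELECT that clones every REMOVE policy on bitstreams, bundles and items into a matching DELETE policy, roughly:

INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id,
                            start_date, end_date, rpname, rptype, rpdescription,
                            eperson_id, epersongroup_id, dspace_object)
SELECT resourcepolicy_seq.nextval,      -- new policy id
       resource_type_id,
       resource_id,
       2,                               -- Constants.DELETE
       start_date, end_date, rpname, rptype, rpdescription,
       eperson_id, epersongroup_id, dspace_object
  FROM resourcepolicy
 WHERE action_id = 4                    -- existing Constants.REMOVE policies
   AND resource_type_id IN (0, 1, 2);   -- bitstreams, bundles, items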
---------------------------------------------------------------- -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, start_date, end_date, rpname, -rptype, rpdescription, eperson_id, epersongroup_id, dspace_object) -SELECT -resourcepolicy_seq.nextval AS policy_id, -resource_type_id, -resource_id, --- Insert the Constants.DELETE action -2 AS action_id, -start_date, -end_date, -rpname, -rptype, -rpdescription, -eperson_id, -epersongroup_id, -dspace_object -FROM resourcepolicy WHERE action_id=4 AND (resource_type_id=0 OR resource_type_id=1 OR resource_type_id=2); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql deleted file mode 100644 index 2ba3517e19..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.05.10__DS-3168-fix-requestitem_item_id_column.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3168 Embargo request Unknown Entity RequestItem ---------------------------------------------------------------- --- convert the item_id and bitstream_id columns from integer to UUID ---------------------------------------------------------------- -ALTER TABLE requestitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE requestitem ADD item_id RAW(16) REFERENCES Item(uuid); -CREATE INDEX requestitem_item on requestitem(item_id); -UPDATE requestitem SET item_id = (SELECT item.uuid FROM item WHERE requestitem.item_legacy_id = item.item_id); -ALTER TABLE requestitem DROP COLUMN item_legacy_id; - -ALTER TABLE requestitem RENAME COLUMN bitstream_id to bitstream_legacy_id; -ALTER TABLE requestitem ADD bitstream_id RAW(16) REFERENCES Bitstream(uuid); -CREATE INDEX requestitem_bitstream on requestitem(bitstream_id); -UPDATE requestitem SET bitstream_id = (SELECT Bitstream.uuid FROM Bitstream WHERE requestitem.bitstream_legacy_id = Bitstream.bitstream_id); -ALTER TABLE requestitem DROP COLUMN bitstream_legacy_id; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql deleted file mode 100644 index 7478397446..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.21__DS-2775.sql +++ /dev/null @@ -1,30 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2775 Drop unused sequences ------------------------------------------------------- - -DROP SEQUENCE bitstream_seq; -DROP SEQUENCE bundle2bitstream_seq; -DROP SEQUENCE bundle_seq; -DROP SEQUENCE collection2item_seq; -DROP SEQUENCE collection_seq; -DROP SEQUENCE community2collection_seq; -DROP SEQUENCE community2community_seq; -DROP SEQUENCE community_seq; -DROP 
SEQUENCE dcvalue_seq; -DROP SEQUENCE eperson_seq; -DROP SEQUENCE epersongroup2eperson_seq; -DROP SEQUENCE epersongroup2workspaceitem_seq; -DROP SEQUENCE epersongroup_seq; -DROP SEQUENCE group2group_seq; -DROP SEQUENCE group2groupcache_seq; -DROP SEQUENCE historystate_seq; -DROP SEQUENCE item2bundle_seq; -DROP SEQUENCE item_seq; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql deleted file mode 100644 index 96f125f78b..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.07.26__DS-3277_fix_handle_assignment.sql +++ /dev/null @@ -1,44 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------------------------------------- --- DS-3277 : 'handle_id' column needs its own separate sequence, so that Handles --- can be minted from 'handle_seq' ----------------------------------------------------------------------------------- --- Create a new sequence for 'handle_id' column. --- The role of this sequence is to simply provide a unique internal ID to the database. -CREATE SEQUENCE handle_id_seq; --- Initialize new 'handle_id_seq' to the maximum value of 'handle_id' -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(handle_id) INTO curr FROM handle; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_id_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_id_seq START WITH ' || NVL(curr,1); -END; -/ - --- Ensure the 'handle_seq' is updated to the maximum *suffix* in 'handle' column, --- as this sequence is used to mint new Handles. 
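Both sequence resets in DS-3277 follow the same pattern: look up the current maximum value, then drop and re-create the sequence so it starts just above it. A minimal sketch of that pattern with a hypothetical sequence and table (example_seq, example_table, example_id):

DECLARE
  curr NUMBER := 0;
BEGIN
  -- Highest value currently in use (NULL if the table is empty)
  SELECT MAX(example_id) INTO curr FROM example_table;
  curr := NVL(curr, 0) + 1;

  -- Drop and re-create the sequence so its next value lands above every existing id
  EXECUTE IMMEDIATE 'DROP SEQUENCE example_seq';
  EXECUTE IMMEDIATE 'CREATE SEQUENCE example_seq START WITH ' || curr;
END;
/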
--- Code borrowed from update-sequences.sql and updateseq.sql -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(to_number(regexp_replace(handle, '.*/', ''), '999999999999')) INTO curr FROM handle WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$'); - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE handle_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE handle_seq START WITH ' || NVL(curr,1); -END; -/ \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql deleted file mode 100644 index e1220c8c7c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.08.23__DS-3097.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 introduced new action id for WITHDRAWN_READ ------------------------------------------------------- - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT bundle2bitstream.bitstream_id FROM bundle2bitstream - LEFT JOIN item2bundle ON bundle2bitstream.bundle_id = item2bundle.bundle_id - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); - -UPDATE resourcepolicy SET action_id = 12 where action_id = 0 and dspace_object in ( - SELECT item2bundle.bundle_id FROM item2bundle - LEFT JOIN item ON item2bundle.item_id = item.uuid - WHERE item.withdrawn = 1 -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql deleted file mode 100644 index 5c3c3842aa..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.29__DS-3410-lost-indexes.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-3410 ---------------------------------------------------------------- --- This script will create lost indexes ---------------------------------------------------------------- - -CREATE INDEX resourcepolicy_object on resourcepolicy(dspace_object); -CREATE INDEX metadatavalue_object on metadatavalue(dspace_object_id); -CREATE INDEX metadatavalue_field_object on metadatavalue(metadata_field_id, dspace_object_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql deleted file mode 100644 index 47b2d18be8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V6.0_2016.11.30__DS-3409.sql +++ /dev/null @@ -1,16 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the 
source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-3097 Handle of collections and communities are lost due to bug at V6.0_2015.03.07__DS-2701_Hibernate_migration.sql ------------------------------------------------------- - -UPDATE handle SET resource_id = (SELECT community.uuid FROM community WHERE handle.resource_legacy_id = community.community_id AND handle.resource_type_id = 4) where handle.resource_type_id = 4; -UPDATE handle SET resource_id = (SELECT collection.uuid FROM collection WHERE handle.resource_legacy_id = collection.collection_id AND handle.resource_type_id = 3) where handle.resource_type_id = 3; -UPDATE handle SET resource_id = (SELECT item.uuid FROM item WHERE handle.resource_legacy_id = item.item_id AND handle.resource_type_id = 2) where handle.resource_type_id = 2; - \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql deleted file mode 100644 index 30cfae91c8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2017.10.12__DS-3542-stateless-sessions.sql +++ /dev/null @@ -1,20 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - ------------------------------------------------------------------------------------------------------------- --- This adds an extra column to the eperson table where we save a salt for stateless authentication ------------------------------------------------------------------------------------------------------------- -ALTER TABLE eperson ADD session_salt varchar(32); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql deleted file mode 100644 index fc1c0b2e23..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.04.16__dspace-entities.sql +++ /dev/null @@ -1,65 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the dspace 7 entities usage -------------------------------------------------------------- -CREATE SEQUENCE entity_type_id_seq; -CREATE SEQUENCE relationship_type_id_seq; -CREATE SEQUENCE relationship_id_seq; - -CREATE TABLE entity_type -( - id INTEGER NOT NULL PRIMARY KEY, - label varchar(32) UNIQUE NOT NULL -); - -CREATE TABLE relationship_type -( - id INTEGER NOT NULL PRIMARY KEY, - left_type INTEGER NOT NULL, - right_type INTEGER NOT NULL, - left_label varchar(32) NOT NULL, - right_label varchar(32) NOT NULL, - left_min_cardinality INTEGER, - left_max_cardinality INTEGER, - right_min_cardinality INTEGER, - right_max_cardinality INTEGER, - FOREIGN KEY (left_type) REFERENCES entity_type(id), - FOREIGN KEY (right_type) REFERENCES entity_type(id), - CONSTRAINT u_relationship_type_constraint UNIQUE (left_type, right_type, left_label, right_label) - -); - -CREATE TABLE relationship -( - id INTEGER NOT NULL PRIMARY KEY, - left_id raw(16) NOT NULL REFERENCES item(uuid), - type_id INTEGER NOT NULL REFERENCES relationship_type(id), - right_id raw(16) NOT NULL REFERENCES item(uuid), - left_place INTEGER, - right_place INTEGER, - CONSTRAINT u_constraint UNIQUE (left_id, type_id, right_id) - -); - -CREATE INDEX entity_type_label_idx ON entity_type(label); -CREATE INDEX rl_ty_by_left_type_idx ON relationship_type(left_type); -CREATE INDEX rl_ty_by_right_type_idx ON relationship_type(right_type); -CREATE INDEX rl_ty_by_left_label_idx ON relationship_type(left_label); -CREATE INDEX rl_ty_by_right_label_idx ON relationship_type(right_label); -CREATE INDEX relationship_by_left_id_idx ON relationship(left_id); -CREATE INDEX relationship_by_right_id_idx ON relationship(right_id); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql deleted file mode 100644 index 68ed690f89..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2018.06.07__DS-3851-permission.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. 
--- http://flywaydb.org/ --- =============================================================== - ----------------------------------------------------------------------------------------------------------------- --- This adds TYPE_INHERITED to all old archived items permission due to the change on resource policy management ----------------------------------------------------------------------------------------------------------------- -UPDATE resourcepolicy set rptype = 'TYPE_INHERITED' - where resource_type_id = 2 and rptype is null - and dspace_object in ( - select uuid from item where in_archive = 1 - ); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql deleted file mode 100644 index b23170f437..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.05.02__DS-4239-workflow-xml-migration.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ---------------------------------------------------------------- --- DS-4239 Migrate the workflow.xml to spring ---------------------------------------------------------------- --- This script will rename the default workflow "default" name --- to the new "defaultWorkflow" identifier ---------------------------------------------------------------- - -UPDATE cwf_pooltask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; -UPDATE cwf_claimtask SET workflow_id='defaultWorkflow' WHERE workflow_id='default'; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql deleted file mode 100644 index cebae09f65..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.07.31__Retrieval_of_name_variant.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns leftwardValue and rightwardValue in table relationship --- Rename columns left_label and right_label to leftward_type and rightward_type ------------------------------------------------------------------------------------ - -ALTER TABLE relationship ADD leftward_value VARCHAR2(50); -ALTER TABLE relationship ADD rightward_value VARCHAR2(50); - -ALTER TABLE relationship_type RENAME COLUMN left_label TO leftward_type; -ALTER TABLE relationship_type RENAME COLUMN right_label TO rightward_type; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql deleted file mode 100644 index 0db294c1c1..0000000000 --- 
a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019.11.13__relationship_type_copy_left_right.sql +++ /dev/null @@ -1,14 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD copy_to_left NUMBER(1) DEFAULT 0 NOT NULL; -ALTER TABLE relationship_type ADD copy_to_right NUMBER(1) DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql deleted file mode 100644 index a7015e3033..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2019_06_14__scripts-and-process.sql +++ /dev/null @@ -1,40 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== -CREATE SEQUENCE process_id_seq; - -CREATE TABLE process -( - process_id INTEGER NOT NULL PRIMARY KEY, - user_id RAW(16) NOT NULL, - start_time TIMESTAMP, - finished_time TIMESTAMP, - creation_time TIMESTAMP NOT NULL, - script VARCHAR(256) NOT NULL, - status VARCHAR(32), - parameters VARCHAR(512) -); - -CREATE TABLE process2bitstream -( - process_id INTEGER REFERENCES process(process_id), - bitstream_id RAW(16) REFERENCES bitstream(uuid), - CONSTRAINT PK_process2bitstream PRIMARY KEY (process_id, bitstream_id) -); - -CREATE INDEX process_user_id_idx ON process(user_id); -CREATE INDEX process_status_idx ON process(status); -CREATE INDEX process_name_idx on process(script); -CREATE INDEX process_start_time_idx on process(start_time); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql deleted file mode 100644 index a108fd74b4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2020.01.08__DS-626-statistics-tracker.sql +++ /dev/null @@ -1,29 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------- --- This will create the setup for the IRUS statistics harvester -------------------------------------------------------------- - -CREATE SEQUENCE openurltracker_seq; - -CREATE TABLE openurltracker -( - tracker_id NUMBER, - tracker_url VARCHAR2(1000), - uploaddate DATE, - CONSTRAINT openurltracker_PK PRIMARY KEY (tracker_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql deleted file mode 100644 index f71173abe6..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.01.22__Remove_basic_workflow.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Drop the 'workflowitem' and 'tasklistitem' tables ------------------------------------------------------------------------------------ - -DROP TABLE workflowitem CASCADE CONSTRAINTS; -DROP TABLE tasklistitem CASCADE CONSTRAINTS; - -DROP SEQUENCE workflowitem_seq; -DROP SEQUENCE tasklistitem_seq; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql deleted file mode 100644 index 95d07be477..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.02.08__tilted_rels.sql +++ /dev/null @@ -1,13 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create columns copy_left and copy_right for RelationshipType ------------------------------------------------------------------------------------ - -ALTER TABLE relationship_type ADD tilted INTEGER; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql deleted file mode 100644 index 9c39091f89..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.03.18__Move_entity_type_to_dspace_schema.sql +++ /dev/null @@ -1,56 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- --- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. 
IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- --- Move all 'relationship.type' metadata fields to 'dspace.entity.type'. Remove 'relationship' schema. -------------------------------------------------------------------------------------------------------- --- Special case: we need to the 'dspace' schema to already exist. If users don't already have it we must create it --- manually via SQL, as by default it won't be created until database updates are finished. -INSERT INTO metadataschemaregistry (metadata_schema_id, namespace, short_id) - SELECT metadataschemaregistry_seq.nextval, 'http://dspace.org/dspace' as namespace, 'dspace' as short_id FROM dual - WHERE NOT EXISTS - (SELECT metadata_schema_id,namespace,short_id FROM metadataschemaregistry - WHERE namespace = 'http://dspace.org/dspace' AND short_id = 'dspace'); - - --- Add 'dspace.entity.type' field to registry (if missing) -INSERT INTO metadatafieldregistry (metadata_field_id, metadata_schema_id, element, qualifier) - SELECT metadatafieldregistry_seq.nextval, - (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace'), 'entity', 'type' FROM dual - WHERE NOT EXISTS - (SELECT metadata_field_id,element,qualifier FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entitye' AND qualifier='type'); - --- Moves all 'relationship.type' field values to a new 'dspace.entity.type' field -UPDATE metadatavalue - SET metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='dspace') - AND element = 'entity' AND qualifier='type') - WHERE metadata_field_id = - (SELECT metadata_field_id FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id='relationship') - AND element = 'type' AND qualifier is NULL); - - --- Delete 'relationship.type' field from registry -DELETE FROM metadatafieldregistry - WHERE metadata_schema_id = (SELECT metadata_schema_id FROM metadataschemaregistry WHERE short_id = 'relationship') - AND element = 'type' AND qualifier is NULL; - --- Delete 'relationship' schema (which is now empty) -DELETE FROM metadataschemaregistry WHERE short_id = 'relationship' AND namespace = 'http://dspace.org/relationship'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql deleted file mode 100644 index 5a6abda041..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.09.24__Move_entity_type_from_item_template_to_collection.sql +++ /dev/null @@ -1,28 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- =============================================================== --- WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING --- 
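-- Illustration (not part of this migration): the UPDATE below re-points each
-- 'dspace.entity.type' value from a collection's template item to the collection
-- itself. A read-only query with the same predicates shows how many rows it touches:
--   SELECT count(*) FROM metadatavalue
--    WHERE dspace_object_id IN (SELECT template_item_id FROM collection)
--      AND metadata_field_id IN (SELECT mfr.metadata_field_id
--                                  FROM metadatafieldregistry mfr
--                                  JOIN metadataschemaregistry msr
--                                    ON mfr.metadata_schema_id = msr.metadata_schema_id
--                                 WHERE msr.short_id = 'dspace'
--                                   AND mfr.element = 'entity' AND mfr.qualifier = 'type');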
--- DO NOT MANUALLY RUN THIS DATABASE MIGRATION. IT WILL BE EXECUTED --- AUTOMATICALLY (IF NEEDED) BY "FLYWAY" WHEN YOU STARTUP DSPACE. --- http://flywaydb.org/ --- =============================================================== - -------------------------------------------------------------------------------------------------------- -------------------------------------------------------------------------------------------------------- -UPDATE metadatavalue SET dspace_object_id = (SELECT uuid - FROM collection - WHERE template_item_id = dspace_object_id) -WHERE dspace_object_id IN (SELECT template_item_id - FROM Collection) - AND metadata_field_id - IN (SELECT metadata_field_id - FROM metadatafieldregistry mfr LEFT JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE msr.short_id = 'dspace' AND mfr.element = 'entity' AND mfr.qualifier = 'type'); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql deleted file mode 100644 index ae8f1e7ef5..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.0_2021.10.04__alter_collection_table_drop_workflow_stem_columns.sql +++ /dev/null @@ -1,15 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------------- ----- ALTER table collection -------------------------------------------------------------------------------------- - -ALTER TABLE collection DROP COLUMN workflow_step_1; -ALTER TABLE collection DROP COLUMN workflow_step_2; -ALTER TABLE collection DROP COLUMN workflow_step_3; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql deleted file mode 100644 index 9c39c15e66..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.1_2021.10.18__Fix_MDV_place_after_migrating_from_DSpace_5.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Make sure the metadatavalue.place column starts at 0 instead of 1 ----------------------------------------------------- -MERGE INTO metadatavalue mdv -USING ( - SELECT dspace_object_id, metadata_field_id, MIN(place) AS minplace - FROM metadatavalue - GROUP BY dspace_object_id, metadata_field_id -) mp -ON ( - mdv.dspace_object_id = mp.dspace_object_id - AND mdv.metadata_field_id = mp.metadata_field_id - AND mp.minplace > 0 -) -WHEN MATCHED THEN UPDATE -SET mdv.place = mdv.place - mp.minplace; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql deleted file mode 100644 index 3fe424cf6c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.04.29__orcid_queue_and_history.sql +++ /dev/null @@ -1,54 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create tables for ORCID Queue and History ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_queue_id_seq; - -CREATE TABLE orcid_queue -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - operation VARCHAR(255), - metadata CLOB, - attempts INTEGER, - CONSTRAINT orcid_queue_pkey PRIMARY KEY (id), - CONSTRAINT orcid_queue_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_queue_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_queue_owner_id_index on orcid_queue(owner_id); - - -CREATE SEQUENCE orcid_history_id_seq; - -CREATE TABLE orcid_history -( - id INTEGER NOT NULL, - owner_id RAW(16) NOT NULL, - entity_id RAW(16), - put_code VARCHAR(255), - timestamp_last_attempt TIMESTAMP, - response_message CLOB, - status INTEGER, - metadata CLOB, - operation VARCHAR(255), - record_type VARCHAR(255), - description VARCHAR(255), - CONSTRAINT orcid_history_pkey PRIMARY KEY (id), - CONSTRAINT orcid_history_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES item (uuid), - CONSTRAINT orcid_history_entity_id_fkey FOREIGN KEY (entity_id) REFERENCES item (uuid) -); - -CREATE INDEX orcid_history_owner_id_index on orcid_history(owner_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql deleted file mode 100644 index 14bf853143..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.05.16__Orcid_token_table.sql +++ /dev/null @@ -1,24 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for ORCID access tokens ------------------------------------------------------------------------------------ - -CREATE SEQUENCE orcid_token_id_seq; - -CREATE TABLE orcid_token -( - id INTEGER NOT NULL, - eperson_id RAW(16) NOT NULL UNIQUE, - profile_item_id RAW(16), - access_token VARCHAR2(100) NOT NULL, - CONSTRAINT orcid_token_pkey PRIMARY KEY (id), - CONSTRAINT orcid_token_eperson_id_fkey FOREIGN KEY (eperson_id) REFERENCES eperson (uuid), - CONSTRAINT orcid_token_profile_item_id_fkey FOREIGN KEY (profile_item_id) REFERENCES item (uuid) -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql 
b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql deleted file mode 100644 index 0e7d417ae5..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.16__process_to_group.sql +++ /dev/null @@ -1,18 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store Groups related to a Process on its creation -------------------------------------------------------------------------------- - -CREATE TABLE Process2Group -( - process_id INTEGER REFERENCES Process(process_id), - group_id UUID REFERENCES epersongroup (uuid) ON DELETE CASCADE, - CONSTRAINT PK_Process2Group PRIMARY KEY (process_id, group_id) -); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql deleted file mode 100644 index 3eb9ae6dd4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.3_2022.06.20__add_last_version_status_column_to_relationship_table.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- NOTE: default 0 ensures that existing relations have "latest_version_status" set to "both" (first constant in enum, see Relationship class) -ALTER TABLE relationship ADD latest_version_status INTEGER DEFAULT 0 NOT NULL; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql deleted file mode 100644 index 3862830230..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.01__add_table_subscriptionparamter_change_columns_subscription_table.sql +++ /dev/null @@ -1,45 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ - - -CREATE SEQUENCE if NOT EXISTS subscription_parameter_seq; ------------------------------------------------------------------------------------ --- ADD table subscription_parameter ------------------------------------------------------------------------------------ -CREATE TABLE if NOT EXISTS subscription_parameter -( - subscription_parameter_id INTEGER NOT NULL, - name VARCHAR(255), - value VARCHAR(255), - subscription_id INTEGER NOT NULL, - CONSTRAINT subscription_parameter_pkey PRIMARY KEY 
(subscription_parameter_id), - CONSTRAINT subscription_parameter_subscription_fkey FOREIGN KEY (subscription_id) - REFERENCES subscription (subscription_id) ON DELETE CASCADE -); --- -- - -ALTER TABLE subscription ADD COLUMN if NOT EXISTS dspace_object_id UUID; ----- -- -ALTER TABLE subscription ADD COLUMN if NOT EXISTS type CHARACTER VARYING(255); --- -UPDATE subscription SET dspace_object_id = collection_id , type = 'content'; --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_dspaceobject_fkey; -ALTER TABLE subscription ADD CONSTRAINT subscription_dspaceobject_fkey FOREIGN KEY (dspace_object_id) REFERENCES dspaceobject (uuid); --- -ALTER TABLE subscription DROP CONSTRAINT IF EXISTS subscription_collection_id_fkey; ----- -- -ALTER TABLE subscription DROP COLUMN IF EXISTS collection_id; --- -- -INSERT INTO subscription_parameter (subscription_parameter_id, name, value, subscription_id) -SELECT getnextid('subscription_parameter'), 'frequency', 'D', subscription_id from "subscription" ; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql deleted file mode 100644 index c7bb0b502e..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.09__Supervision_Orders_table.sql +++ /dev/null @@ -1,78 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------------- --- Table to store supervision orders -------------------------------------------------------------------------------- - -CREATE TABLE supervision_orders -( - id INTEGER PRIMARY KEY, - item_id UUID REFERENCES Item(uuid) ON DELETE CASCADE, - eperson_group_id UUID REFERENCES epersongroup(uuid) ON DELETE CASCADE -); - -CREATE SEQUENCE supervision_orders_seq; - -INSERT INTO supervision_orders (id, item_id, eperson_group_id) -SELECT supervision_orders_seq.nextval AS id, w.item_id, e.uuid -FROM epersongroup2workspaceitem ew INNER JOIN workspaceitem w -ON ew.workspace_item_id = w.workspace_item_id -INNER JOIN epersongroup e -ON ew.eperson_group_id = e.uuid; - - --- UPDATE policies for supervision orders --- items, bundles and bitstreams - -DECLARE -BEGIN - -FOR rec IN -( -SELECT so.item_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN RESOURCEPOLICY rp on so.item_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT ib.bundle_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN RESOURCEPOLICY rp on ib.bundle_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL - -UNION - -SELECT bs.bitstream_id as dspace_object, so.eperson_group_id, rp.resource_type_id -FROM supervision_orders so -INNER JOIN item2bundle ib ON so.item_id = ib.item_id -INNER JOIN bundle2bitstream bs ON ib.bundle_id = bs.bundle_id -INNER JOIN RESOURCEPOLICY rp on bs.bitstream_id = rp.dspace_object -AND so.eperson_group_id = rp.epersongroup_id -WHERE rp.rptype IS NULL -) - -LOOP - -UPDATE RESOURCEPOLICY SET rptype = 
'TYPE_SUBMISSION' -where dspace_object = rec.dspace_object -AND epersongroup_id = rec.eperson_group_id -AND rptype IS NULL; - -END LOOP; -END; - -------------------------------------------------------------------------------- --- drop epersongroup2workspaceitem table -------------------------------------------------------------------------------- - -DROP TABLE epersongroup2workspaceitem; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql deleted file mode 100644 index 9d13138fda..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.5_2022.12.15__system_wide_alerts.sql +++ /dev/null @@ -1,22 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Create table for System wide alerts ------------------------------------------------------------------------------------ - -CREATE SEQUENCE alert_id_seq; - -CREATE TABLE systemwidealert -( - alert_id INTEGER NOT NULL PRIMARY KEY, - message VARCHAR(512), - allow_sessions VARCHAR(64), - countdown_to TIMESTAMP, - active BOOLEAN -); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql deleted file mode 100644 index 8aec44a7f6..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.24__Update_PNG_in_bitstream_format_registry.sql +++ /dev/null @@ -1,17 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------------------------------------ --- Update short description for PNG mimetype in the bitstream format registry --- See: https://github.com/DSpace/DSpace/pull/8722 ------------------------------------------------------------------------------------ - -UPDATE bitstreamformatregistry -SET short_description='PNG' -WHERE short_description='image/png' - AND mimetype='image/png'; diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql deleted file mode 100644 index 509e0a2869..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/V7.6_2023.03.29__orcid_queue_and_history_descriptions_to_text_type.sql +++ /dev/null @@ -1,10 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -ALTER TABLE orcid_history MODIFY (description CLOB); -ALTER TABLE orcid_queue MODIFY (description CLOB); 
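For comparison, a PostgreSQL-flavoured sketch of the same column-type change as the two Oracle ALTER statements above; the corresponding PostgreSQL migration is not part of this hunk, so the exact statements there may differ:

    ALTER TABLE orcid_history ALTER COLUMN description TYPE TEXT;
    ALTER TABLE orcid_queue   ALTER COLUMN description TYPE TEXT;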
diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql deleted file mode 100644 index b4d4d755cb..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/oracle/update-sequences.sql +++ /dev/null @@ -1,77 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- SQL code to update the ID (primary key) generating sequences, if some --- import operation has set explicit IDs. --- --- Sequences are used to generate IDs for new rows in the database. If a --- bulk import operation, such as an SQL dump, specifies primary keys for --- imported data explicitly, the sequences are out of sync and need updating. --- This SQL code does just that. --- --- This should rarely be needed; any bulk import should be performed using the --- org.dspace.content API which is safe to use concurrently and in multiple --- JVMs. The SQL code below will typically only be required after a direct --- SQL data dump from a backup or somesuch. - --- The 'updateseq' procedure was derived from incseq.sql found at: --- http://www.akadia.com/services/scripts/incseq.sql - -DECLARE - PROCEDURE updateseq ( seq IN VARCHAR, - tbl IN VARCHAR, - attr IN VARCHAR, - cond IN VARCHAR DEFAULT '' ) IS - curr NUMBER := 0; - BEGIN - EXECUTE IMMEDIATE 'SELECT max(' || attr - || ') FROM ' || tbl - || ' ' || cond - INTO curr; - curr := curr + 1; - EXECUTE IMMEDIATE 'DROP SEQUENCE ' || seq; - EXECUTE IMMEDIATE 'CREATE SEQUENCE ' - || seq - || ' START WITH ' - || NVL(curr, 1); - END updateseq; - -BEGIN - updateseq('bitstreamformatregistry_seq', 'bitstreamformatregistry', - 'bitstream_format_id'); - updateseq('fileextension_seq', 'fileextension', 'file_extension_id'); - updateseq('resourcepolicy_seq', 'resourcepolicy', 'policy_id'); - updateseq('workspaceitem_seq', 'workspaceitem', 'workspace_item_id'); - updateseq('registrationdata_seq', 'registrationdata', - 'registrationdata_id'); - updateseq('subscription_seq', 'subscription', 'subscription_id'); - updateseq('metadatafieldregistry_seq', 'metadatafieldregistry', - 'metadata_field_id'); - updateseq('metadatavalue_seq', 'metadatavalue', 'metadata_value_id'); - updateseq('metadataschemaregistry_seq', 'metadataschemaregistry', - 'metadata_schema_id'); - updateseq('harvested_collection_seq', 'harvested_collection', 'id'); - updateseq('harvested_item_seq', 'harvested_item', 'id'); - updateseq('webapp_seq', 'webapp', 'webapp_id'); - updateseq('requestitem_seq', 'requestitem', 'requestitem_id'); - updateseq('handle_id_seq', 'handle', 'handle_id'); - - -- Handle Sequence is a special case. Since Handles minted by DSpace - -- use the 'handle_seq', we need to ensure the next assigned handle - -- will *always* be unique. So, 'handle_seq' always needs to be set - -- to the value of the *largest* handle suffix. That way when the - -- next handle is assigned, it will use the next largest number. This - -- query does the following: - -- For all 'handle' values which have a number in their suffix - -- (after '/'), find the maximum suffix value, convert it to a - -- number, and set the 'handle_seq' to start at the next value (see - -- updateseq above for more). 
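-- Worked example (illustration only): for a handle row '123456789/42',
--   regexp_replace(handle, '.*/', '')      evaluates to '42'
--   to_number('42', '999999999999')        evaluates to 42
-- so after MAX() over all numeric suffixes, updateseq recreates handle_seq
-- with START WITH 43 (max + 1), keeping newly minted handles unique.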
- updateseq('handle_seq', 'handle', - q'{to_number(regexp_replace(handle, '.*/', ''), '999999999999')}', - q'{WHERE REGEXP_LIKE(handle, '^.*/[0123456789]*$')}'); -END; \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql deleted file mode 100644 index 9bca3a17c9..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V5.7_2017.05.05__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - 
SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and resource_id = collection_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - collection_id AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and resource_id = collection_id - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND resource_id = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND resource_id 
= item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND resource_id = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element 
= 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND 
(wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); - -INSERT 
INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, resource_id) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.resource_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.resource_id = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql deleted file mode 100644 index 917078594c..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.0_2015.08.11__DS-2701_Basic_Workflow_Migration.sql +++ /dev/null @@ -1,37 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- --- Alter workflow item -ALTER TABLE workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE workflowitem SET item_id = (SELECT item.uuid FROM item WHERE workflowitem.item_legacy_id = item.item_id); -ALTER TABLE workflowitem DROP COLUMN item_legacy_id; - --- Migrate task list item -ALTER TABLE TasklistItem RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE TasklistItem ADD eperson_id RAW(16) REFERENCES EPerson(uuid); -UPDATE TasklistItem SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE TasklistItem.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE TasklistItem DROP COLUMN eperson_legacy_id; - --- Migrate task workflow item -ALTER TABLE workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE workflowitem DROP COLUMN collection_legacy_id; -ALTER TABLE workflowitem RENAME COLUMN owner to owner_legacy_id; -ALTER TABLE workflowitem ADD owner RAW(16) REFERENCES EPerson (uuid); -UPDATE workflowitem SET owner = (SELECT eperson.uuid FROM eperson WHERE workflowitem.owner_legacy_id = eperson.eperson_id); -ALTER TABLE workflowitem DROP COLUMN owner_legacy_id; -UPDATE workflowitem SET state = -1 WHERE state IS NULL; -UPDATE workflowitem SET 
multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql deleted file mode 100644 index b3887a5af4..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/basicWorkflow/V6.1_2017.01.03__DS-3431.sql +++ /dev/null @@ -1,503 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - -------------------------------------------------------------------------- --- DS-3431 Workflow system is vulnerable to unauthorized manipulations -- -------------------------------------------------------------------------- - ------------------------------------------------------------------------ --- grant claiming permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '5' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 5 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '6' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 6 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '7' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 7 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ------------------------------------------------------------------------ --- grant add permissions to all workflow step groups (step 1-3) -- ------------------------------------------------------------------------ -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_1 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_1 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE 
resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_1 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_2 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_2 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_2 and dspace_object = uuid - ); - -INSERT INTO resourcepolicy -(policy_id, resource_type_id, action_id, rptype, epersongroup_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '3' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - workflow_step_3 AS epersongroup_id, - uuid AS dspace_object - FROM collection - WHERE workflow_step_3 IS NOT NULL - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 3 AND action_id = 3 AND epersongroup_id = workflow_step_3 and dspace_object = uuid - ); - ----------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on workflow items to reviewers -- ----------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 0 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 1 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 2 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 3 AND eperson_id = owner AND dspace_object = item_id - ); - -INSERT INTO resourcepolicy - 
(policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '2' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - owner AS eperson_id, - item_id AS dspace_object - FROM workflowitem - WHERE - owner IS NOT NULL - AND (state = 2 OR state = 4 OR state = 6) - AND NOT EXISTS ( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 2 AND action_id = 4 AND eperson_id = owner AND dspace_object = item_id - ); - ------------------------------------------------------------------------------------ --- grant read/write/delete/add/remove permission on Bundle ORIGINAL to reviewers -- ------------------------------------------------------------------------------------ -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND 
mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '1' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - i2b.bundle_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 1 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = i2b.bundle_id - ); - - -------------------------------------------------------------------------------- --- grant read/write/delete/add/remove permission on all Bitstreams of Bundle -- --- ORIGINAL to reviewers -- -------------------------------------------------------------------------------- -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '0' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL 
- AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 0 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '1' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 1 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL policy_id, - '0' AS resource_type_id, - '2' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 2 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '3' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 3 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object 
= b2b.bitstream_id - ); - -INSERT INTO resourcepolicy - (policy_id, resource_type_id, action_id, rptype, eperson_id, dspace_object) - SELECT - resourcepolicy_seq.NEXTVAL AS policy_id, - '0' AS resource_type_id, - '4' AS action_id, - 'TYPE_WORKFLOW' AS rptype, - wfi.owner AS eperson_id, - b2b.bitstream_id AS dspace_object - FROM workflowitem wfi - JOIN item2bundle i2b - ON i2b.item_id = wfi.item_id - JOIN bundle2bitstream b2b - ON b2b.bundle_id = i2b.bundle_id - JOIN metadatavalue mv - ON mv.dspace_object_id = i2b.bundle_id - JOIN metadatafieldregistry mfr - ON mv.metadata_field_id = mfr.metadata_field_id - JOIN metadataschemaregistry msr - ON mfr.metadata_schema_id = msr.metadata_schema_id - WHERE - msr.namespace = 'http://dublincore.org/documents/dcmi-terms/' - AND mfr.element = 'title' - AND mfr.qualifier IS NULL - AND mv.text_value LIKE 'ORIGINAL' - AND wfi.owner IS NOT NULL - AND (wfi.state = 2 OR wfi.state = 4 OR wfi.state = 6) - AND NOT EXISTS( - SELECT 1 FROM resourcepolicy WHERE resource_type_id = 0 AND action_id = 4 AND resourcepolicy.eperson_id = owner AND resourcepolicy.dspace_object = b2b.bitstream_id - ); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql deleted file mode 100644 index 7a992836ee..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V6.0_2015.08.11__DS-2701_Xml_Workflow_Migration.sql +++ /dev/null @@ -1,141 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ------------------------------------------------------- --- DS-2701 Service based API / Hibernate integration ------------------------------------------------------- -UPDATE collection SET workflow_step_1 = null; -UPDATE collection SET workflow_step_2 = null; -UPDATE collection SET workflow_step_3 = null; - --- cwf_workflowitem - -DROP INDEX cwf_workflowitem_coll_fk_idx; - -ALTER TABLE cwf_workflowitem RENAME COLUMN item_id to item_legacy_id; -ALTER TABLE cwf_workflowitem ADD item_id RAW(16) REFERENCES Item(uuid); -UPDATE cwf_workflowitem SET item_id = (SELECT item.uuid FROM item WHERE cwf_workflowitem.item_legacy_id = item.item_id); -ALTER TABLE cwf_workflowitem DROP COLUMN item_legacy_id; - -ALTER TABLE cwf_workflowitem RENAME COLUMN collection_id to collection_legacy_id; -ALTER TABLE cwf_workflowitem ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_workflowitem SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_workflowitem.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_workflowitem DROP COLUMN collection_legacy_id; - -UPDATE cwf_workflowitem SET multiple_titles = '0' WHERE multiple_titles IS NULL; -UPDATE cwf_workflowitem SET published_before = '0' WHERE published_before IS NULL; -UPDATE cwf_workflowitem SET multiple_files = '0' WHERE multiple_files IS NULL; - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - --- cwf_collectionrole - -ALTER TABLE cwf_collectionrole DROP CONSTRAINT cwf_collectionrole_unique; -DROP INDEX cwf_cr_coll_role_fk_idx; -DROP INDEX cwf_cr_coll_fk_idx; - -ALTER TABLE cwf_collectionrole RENAME COLUMN 
collection_id to collection_legacy_id; -ALTER TABLE cwf_collectionrole ADD collection_id RAW(16) REFERENCES Collection(uuid); -UPDATE cwf_collectionrole SET collection_id = (SELECT collection.uuid FROM collection WHERE cwf_collectionrole.collection_legacy_id = collection.collection_id); -ALTER TABLE cwf_collectionrole DROP COLUMN collection_legacy_id; - -ALTER TABLE cwf_collectionrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_collectionrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_collectionrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_collectionrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_collectionrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - --- cwf_workflowitemrole - -ALTER TABLE cwf_workflowitemrole DROP CONSTRAINT cwf_workflowitemrole_unique; -DROP INDEX cwf_wfir_item_role_fk_idx; -DROP INDEX cwf_wfir_item_fk_idx; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_workflowitemrole SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_workflowitemrole.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_workflowitemrole RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_workflowitemrole ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_workflowitemrole SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_workflowitemrole.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_workflowitemrole DROP COLUMN eperson_legacy_id; - - -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - --- cwf_pooltask - -DROP INDEX cwf_pt_eperson_fk_idx; -DROP INDEX cwf_pt_workflow_eperson_fk_idx; - -ALTER TABLE cwf_pooltask RENAME COLUMN group_id to group_legacy_id; -ALTER TABLE cwf_pooltask ADD group_id RAW(16) REFERENCES epersongroup(uuid); -UPDATE cwf_pooltask SET group_id = (SELECT epersongroup.uuid FROM epersongroup WHERE cwf_pooltask.group_legacy_id = epersongroup.eperson_group_id); -ALTER TABLE cwf_pooltask DROP COLUMN group_legacy_id; - -ALTER TABLE cwf_pooltask RENAME COLUMN eperson_id to eperson_legacy_id; -ALTER TABLE cwf_pooltask ADD eperson_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_pooltask SET eperson_id = (SELECT eperson.uuid FROM eperson WHERE cwf_pooltask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_pooltask DROP COLUMN eperson_legacy_id; - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - --- cwf_claimtask - -ALTER TABLE cwf_claimtask DROP CONSTRAINT cwf_claimtask_unique; -DROP INDEX cwf_ct_workflow_fk_idx; -DROP INDEX cwf_ct_workflow_eperson_fk_idx; -DROP INDEX cwf_ct_eperson_fk_idx; -DROP INDEX cwf_ct_wfs_fk_idx; -DROP INDEX cwf_ct_wfs_action_fk_idx; -DROP INDEX cwf_ct_wfs_action_e_fk_idx; - -ALTER TABLE 
cwf_claimtask RENAME COLUMN owner_id to eperson_legacy_id; -ALTER TABLE cwf_claimtask ADD owner_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_claimtask SET owner_id = (SELECT eperson.uuid FROM eperson WHERE cwf_claimtask.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_claimtask DROP COLUMN eperson_legacy_id; - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - --- cwf_in_progress_user - -ALTER TABLE cwf_in_progress_user DROP CONSTRAINT cwf_in_progress_user_unique; -DROP INDEX cwf_ipu_workflow_fk_idx; -DROP INDEX cwf_ipu_eperson_fk_idx; - -ALTER TABLE cwf_in_progress_user RENAME COLUMN user_id to eperson_legacy_id; -ALTER TABLE cwf_in_progress_user ADD user_id RAW(16) REFERENCES eperson(uuid); -UPDATE cwf_in_progress_user SET user_id = (SELECT eperson.uuid FROM eperson WHERE cwf_in_progress_user.eperson_legacy_id = eperson.eperson_id); -ALTER TABLE cwf_in_progress_user DROP COLUMN eperson_legacy_id; -UPDATE cwf_in_progress_user SET finished = '0' WHERE finished IS NULL; - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); \ No newline at end of file diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql deleted file mode 100644 index 0402fc9948..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/sqlmigration/workflow/oracle/xmlworkflow/V7.0_2018.04.03__upgrade_workflow_policy.sql +++ /dev/null @@ -1,27 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - --- UPDATE policies for claimtasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id JOIN item ON cwf_workflowitem.item_id = item.uuid) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT item2bundle.bundle_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 
'TYPE_WORKFLOW' WHERE dspace_object in (SELECT bundle2bitstream.bitstream_id FROM cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Create policies for pooled tasks --- Item -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bundles -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); - --- Bitstreams -UPDATE RESOURCEPOLICY SET rptype = 'TYPE_WORKFLOW' WHERE dspace_object in (SELECT cwf_workflowitem.item_id FROM cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id) AND eperson_id not in (SELECT item.submitter_id FROM cwf_workflowitem JOIN item ON cwf_workflowitem.item_id = item.uuid); diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql deleted file mode 100644 index f582f37c69..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.collection_id AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, 
step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE resource_type_id = 2 AND resource_id IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 1 AND resource_id IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE resource_type_id = 0 AND resource_id IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, 
resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, resource_id, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS resource_id, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.item_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql deleted file mode 100644 index 70eb419d8f..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_data_workflow_migration.sql +++ /dev/null @@ -1,377 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Data Migration for XML/Configurable Workflow --- --- This file will automatically migrate existing --- classic workflows to XML/Configurable workflows. --- NOTE however that the corresponding --- "xml_workflow_migration.sql" script must FIRST be --- called to create the appropriate database tables. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - --- Convert workflow groups: --- TODO: is 'to_number' ok? 
do not forget to change role_id values - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'reviewer' AS role_id, -collection.workflow_step_1 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_1 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'editor' AS role_id, -collection.workflow_step_2 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_2 IS NOT NULL; - -INSERT INTO cwf_collectionrole (collectionrole_id, role_id, group_id, collection_id) -SELECT -cwf_collectionrole_seq.nextval as collectionrole_id, -'finaleditor' AS role_id, -collection.workflow_step_3 AS group_id, -collection.uuid AS collection_id -FROM collection -WHERE collection.workflow_step_3 IS NOT NULL; - - --- Migrate workflow items -INSERT INTO cwf_workflowitem (workflowitem_id, item_id, collection_id, multiple_titles, published_before, multiple_files) -SELECT -workflow_id AS workflowitem_id, -item_id, -collection_id, -multiple_titles, -published_before, -multiple_files -FROM workflowitem; - - --- Migrate claimed tasks -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'reviewaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 2; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'editaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 4; - -INSERT INTO cwf_claimtask (claimtask_id,workflowitem_id, workflow_id, step_id, action_id, owner_id) -SELECT -cwf_claimtask_seq.nextval AS claimtask_id, -workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'finaleditaction' AS action_id, -owner AS owner_id -FROM workflowitem WHERE owner IS NOT NULL AND state = 6; - - --- Migrate pooled tasks -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'reviewstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 1 AND cwf_collectionrole.role_id = 'reviewer'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) -SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'editstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 3 AND cwf_collectionrole.role_id = 'editor'; - -INSERT INTO cwf_pooltask (pooltask_id,workflowitem_id, workflow_id, step_id, action_id, group_id) 
-SELECT -cwf_pooltask_seq.nextval AS pooltask_id, -workflowitem.workflow_id AS workflowitem_id, -'default' AS workflow_id, -'finaleditstep' AS step_id, -'claimaction' AS action_id, -cwf_collectionrole.group_id AS group_id -FROM workflowitem INNER JOIN cwf_collectionrole ON workflowitem.collection_id = cwf_collectionrole.collection_id -WHERE workflowitem.owner IS NULL AND workflowitem.state = 5 AND cwf_collectionrole.role_id = 'finaleditor'; - --- Delete resource policies for workflowitems before creating new ones -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item_id FROM workflowitem); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT item2bundle.bundle_id FROM - (workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id)); - -DELETE FROM resourcepolicy -WHERE dspace_object IN - (SELECT bundle2bitstream.bitstream_id FROM - ((workflowitem INNER JOIN item2bundle ON workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id)); --- Create policies for claimtasks --- public static final int BITSTREAM = 0; --- public static final int BUNDLE = 1; --- public static final int ITEM = 2; - --- public static final int READ = 0; --- public static final int WRITE = 1; --- public static final int DELETE = 2; --- public static final int ADD = 3; --- public static final int REMOVE = 4; --- Item --- TODO: getnextID == SELECT sequence.nextval FROM DUAL!! --- Create a temporarty table with action ID's -CREATE TABLE temptable( - action_id INTEGER PRIMARY KEY -); -INSERT ALL - INTO temptable (action_id) VALUES (0) - INTO temptable (action_id) VALUES (1) - INTO temptable (action_id) VALUES (2) - INTO temptable (action_id) VALUES (3) - INTO temptable (action_id) VALUES (4) -SELECT * FROM DUAL; - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_claimtask.owner_id AS eperson_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_claimtask ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - - --- Create policies for pooled tasks - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS 
policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id), -temptable; - --- Bundles -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - (cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id -), temptable; - --- Bitstreams -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, epersongroup_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -temptable.action_id AS action_id, -cwf_pooltask.group_id AS epersongroup_id -FROM -( - ((cwf_workflowitem INNER JOIN cwf_pooltask ON cwf_workflowitem.workflowitem_id = cwf_pooltask.workflowitem_id) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -), temptable; - --- Drop the temporary table with the action ID's -DROP TABLE temptable; - --- Create policies for submitter --- TODO: only add if unique -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -2 AS resource_type_id, -cwf_workflowitem.item_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -1 AS resource_type_id, -item2bundle.bundle_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM ((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id - ); - -INSERT INTO resourcepolicy (policy_id, resource_type_id, dspace_object, action_id, eperson_id) -SELECT -resourcepolicy_seq.nextval AS policy_id, -0 AS resource_type_id, -bundle2bitstream.bitstream_id AS dspace_object, -0 AS action_id, -item.submitter_id AS eperson_id -FROM (((cwf_workflowitem INNER JOIN item ON cwf_workflowitem.item_id = item.uuid) - INNER JOIN item2bundle ON cwf_workflowitem.item_id = item2bundle.item_id) - INNER JOIN bundle2bitstream ON item2bundle.bundle_id = bundle2bitstream.bundle_id -); - --- TODO: not tested yet -INSERT INTO cwf_in_progress_user (in_progress_user_id, workflowitem_id, user_id, finished) -SELECT - cwf_in_progress_user_seq.nextval AS in_progress_user_id, - cwf_workflowitem.workflowitem_id AS workflowitem_id, - cwf_claimtask.owner_id AS user_id, - 0 as finished -FROM - (cwf_claimtask INNER JOIN cwf_workflowitem ON cwf_workflowitem.workflowitem_id = cwf_claimtask.workflowitem_id); - --- TODO: improve this, important is NVL(curr, 1)!! 
without this function, empty tables (max = [null]) will only result in sequence deletion -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitem_id) INTO curr FROM cwf_workflowitem; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitem_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitem_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(collectionrole_id) INTO curr FROM cwf_collectionrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_collectionrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_collectionrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(workflowitemrole_id) INTO curr FROM cwf_workflowitemrole; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_workflowitemrole_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_workflowitemrole_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(pooltask_id) INTO curr FROM cwf_pooltask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_pooltask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_pooltask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(claimtask_id) INTO curr FROM cwf_claimtask; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_claimtask_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_claimtask_seq START WITH ' || NVL(curr, 1); -END; -/ - -DECLARE - curr NUMBER := 0; -BEGIN - SELECT max(in_progress_user_id) INTO curr FROM cwf_in_progress_user; - - curr := curr + 1; - - EXECUTE IMMEDIATE 'DROP SEQUENCE cwf_in_progress_user_seq'; - - EXECUTE IMMEDIATE 'CREATE SEQUENCE cwf_in_progress_user_seq START WITH ' || NVL(curr, 1); -END; -/ diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql deleted file mode 100644 index 541af73dfe..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/v6.0__DS-2701_xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow (for DSpace 6.0) --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. 
--- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.xmlworkflow.V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id RAW(16) REFERENCES item(uuid) UNIQUE, - collection_id RAW(16) REFERENCES collection(uuid), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id RAW(16) REFERENCES collection(uuid), -group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id RAW(16) REFERENCES eperson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id RAW(16) REFERENCES EPerson(uuid), - group_id RAW(16) REFERENCES epersongroup(uuid) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id RAW(16) REFERENCES eperson(uuid) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE 
cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id RAW(16) REFERENCES eperson(uuid), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql deleted file mode 100644 index f8f0e564e8..0000000000 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/xmlworkflow/oracle/xml_workflow_migration.sql +++ /dev/null @@ -1,124 +0,0 @@ --- --- The contents of this file are subject to the license and copyright --- detailed in the LICENSE and NOTICE files at the root of the source --- tree and available online at --- --- http://www.dspace.org/license/ --- - ----------------------------------------------------- --- Database Schema Update for XML/Configurable Workflow --- --- This file will automatically create/update your --- DSpace Database tables to support XML/Configurable workflows. --- However, it does NOT migrate your existing classic --- workflows. That step is performed by the corresponding --- "data_workflow_migration.sql" script. --- --- This script is called automatically by the following --- Flyway Java migration class: --- org.dspace.storage.rdbms.migration.V5_0_2014_01_01__XMLWorkflow_Migration ----------------------------------------------------- - -CREATE SEQUENCE cwf_workflowitem_seq; -CREATE SEQUENCE cwf_collectionrole_seq; -CREATE SEQUENCE cwf_workflowitemrole_seq; -CREATE SEQUENCE cwf_claimtask_seq; -CREATE SEQUENCE cwf_in_progress_user_seq; -CREATE SEQUENCE cwf_pooltask_seq; - - -CREATE TABLE cwf_workflowitem -( - workflowitem_id INTEGER PRIMARY KEY, - item_id INTEGER REFERENCES item(item_id) UNIQUE, - collection_id INTEGER REFERENCES collection(collection_id), - -- - -- Answers to questions on first page of submit UI - multiple_titles NUMBER(1), - published_before NUMBER(1), - multiple_files NUMBER(1) - -- Note: stage reached not applicable here - people involved in workflow - -- can always jump around submission UI -); - - -CREATE INDEX cwf_workflowitem_coll_fk_idx ON cwf_workflowitem(collection_id); - - -CREATE TABLE cwf_collectionrole ( -collectionrole_id INTEGER PRIMARY KEY, -role_id VARCHAR2(256), -collection_id integer REFERENCES collection(collection_id), -group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_collectionrole -ADD CONSTRAINT cwf_collectionrole_unique UNIQUE (role_id, collection_id, group_id); - -CREATE INDEX cwf_cr_coll_role_fk_idx ON cwf_collectionrole(collection_id,role_id); -CREATE INDEX cwf_cr_coll_fk_idx ON cwf_collectionrole(collection_id); - - -CREATE TABLE cwf_workflowitemrole ( - workflowitemrole_id INTEGER PRIMARY KEY, - role_id VARCHAR2(256), - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - eperson_id integer REFERENCES eperson(eperson_id), - group_id integer REFERENCES epersongroup(eperson_group_id) -); -ALTER TABLE cwf_workflowitemrole -ADD CONSTRAINT cwf_workflowitemrole_unique UNIQUE (role_id, workflowitem_id, eperson_id, group_id); - -CREATE INDEX cwf_wfir_item_role_fk_idx ON cwf_workflowitemrole(workflowitem_id,role_id); -CREATE INDEX 
cwf_wfir_item_fk_idx ON cwf_workflowitemrole(workflowitem_id); - - -CREATE TABLE cwf_pooltask ( - pooltask_id INTEGER PRIMARY KEY, - workflowitem_id INTEGER REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - eperson_id INTEGER REFERENCES EPerson(eperson_id), - group_id INTEGER REFERENCES epersongroup(eperson_group_id) -); - -CREATE INDEX cwf_pt_eperson_fk_idx ON cwf_pooltask(eperson_id); -CREATE INDEX cwf_pt_workflow_fk_idx ON cwf_pooltask(workflowitem_id); -CREATE INDEX cwf_pt_workflow_eperson_fk_idx ON cwf_pooltask(eperson_id,workflowitem_id); - - - -CREATE TABLE cwf_claimtask ( - claimtask_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - workflow_id VARCHAR2(256), - step_id VARCHAR2(256), - action_id VARCHAR2(256), - owner_id integer REFERENCES eperson(eperson_id) -); - -ALTER TABLE cwf_claimtask -ADD CONSTRAINT cwf_claimtask_unique UNIQUE (step_id, workflowitem_id, workflow_id, owner_id, action_id); - -CREATE INDEX cwf_ct_workflow_fk_idx ON cwf_claimtask(workflowitem_id); -CREATE INDEX cwf_ct_workflow_eperson_fk_idx ON cwf_claimtask(workflowitem_id,owner_id); -CREATE INDEX cwf_ct_eperson_fk_idx ON cwf_claimtask(owner_id); -CREATE INDEX cwf_ct_wfs_fk_idx ON cwf_claimtask(workflowitem_id,step_id); -CREATE INDEX cwf_ct_wfs_action_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id); -CREATE INDEX cwf_ct_wfs_action_e_fk_idx ON cwf_claimtask(workflowitem_id,step_id,action_id,owner_id); - - -CREATE TABLE cwf_in_progress_user ( - in_progress_user_id INTEGER PRIMARY KEY, - workflowitem_id integer REFERENCES cwf_workflowitem(workflowitem_id), - user_id integer REFERENCES eperson(eperson_id), - finished NUMBER(1) DEFAULT 0 -); - -ALTER TABLE cwf_in_progress_user -ADD CONSTRAINT cwf_in_progress_user_unique UNIQUE (workflowitem_id, user_id); - -CREATE INDEX cwf_ipu_workflow_fk_idx ON cwf_in_progress_user(workflowitem_id); -CREATE INDEX cwf_ipu_eperson_fk_idx ON cwf_in_progress_user(user_id); - From 7a209d228aef5689132656a35dca7849b99813b9 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 3 May 2023 13:37:36 -0500 Subject: [PATCH 039/125] Remove oracle specific Java code / comments. --- .../org/dspace/eperson/GroupServiceImpl.java | 2 - .../dspace/storage/rdbms/DatabaseUtils.java | 50 ++----------------- .../rdbms/migration/MigrationUtils.java | 16 ------ ...Drop_constraint_for_DSpace_1_4_schema.java | 5 +- ...Drop_constraint_for_DSpace_1_6_schema.java | 5 +- ...adata_For_All_Objects_drop_constraint.java | 5 +- ...4_11_04__Enable_XMLWorkflow_Migration.java | 2 - ..._DS_2701_Enable_XMLWorkflow_Migration.java | 2 - .../postgres/upgradeToFlyway4x.sql | 2 +- 9 files changed, 12 insertions(+), 77 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java index f3dc6ca36a..607e57af0b 100644 --- a/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/eperson/GroupServiceImpl.java @@ -353,8 +353,6 @@ public class GroupServiceImpl extends DSpaceObjectServiceImpl implements List groupCache = group2GroupCacheDAO.findByChildren(context, groups); // now we have all owning groups, also grab all parents of owning groups - // yes, I know this could have been done as one big query and a union, - // but doing the Oracle port taught me to keep to simple SQL! 
for (Group2GroupCache group2GroupCache : groupCache) { groups.add(group2GroupCache.getParent()); } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java index 1464fb44ec..89010a7308 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/DatabaseUtils.java @@ -75,7 +75,6 @@ public class DatabaseUtils { // Types of databases supported by DSpace. See getDbType() public static final String DBMS_POSTGRES = "postgres"; - public static final String DBMS_ORACLE = "oracle"; public static final String DBMS_H2 = "h2"; // Name of the table that Flyway uses for its migration history @@ -369,9 +368,7 @@ public class DatabaseUtils { .println("\nWARNING: ALL DATA AND TABLES IN YOUR DATABASE WILL BE PERMANENTLY DELETED.\n"); System.out.println("There is NO turning back from this action. Backup your DB before " + "continuing."); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("\nORACLE WARNING: your RECYCLEBIN will also be PURGED.\n"); - } else if (dbType.equals(DBMS_POSTGRES)) { + if (dbType.equals(DBMS_POSTGRES)) { System.out.println( "\nPOSTGRES WARNING: the '" + PostgresUtils.PGCRYPTO + "' extension will be dropped " + "if it is in the same schema as the DSpace database.\n"); @@ -467,11 +464,10 @@ public class DatabaseUtils { DatabaseMetaData meta = connection.getMetaData(); String dbType = getDbType(connection); System.out.println("\nDatabase Type: " + dbType); - if (dbType.equals(DBMS_ORACLE)) { - System.out.println("===================================="); - System.out.println("WARNING: Oracle support is deprecated!"); - System.out.println("See https://github.com/DSpace/DSpace/issues/8214"); - System.out.println("====================================="); + if (!dbType.equals(DBMS_POSTGRES) && !dbType.equals(DBMS_H2)) { + System.err.println("===================================="); + System.err.println("ERROR: Database type " + dbType + " is UNSUPPORTED!"); + System.err.println("====================================="); } System.out.println("Database URL: " + meta.getURL()); System.out.println("Database Schema: " + getSchemaName(connection)); @@ -606,10 +602,6 @@ public class DatabaseUtils { String dbType = getDbType(connection); connection.close(); - if (dbType.equals(DBMS_ORACLE)) { - log.warn("ORACLE SUPPORT IS DEPRECATED! See https://github.com/DSpace/DSpace/issues/8214"); - } - // Determine location(s) where Flyway will load all DB migrations ArrayList scriptLocations = new ArrayList<>(); @@ -946,26 +938,6 @@ public class DatabaseUtils { // First, run Flyway's clean command on database. // For MOST database types, this takes care of everything flyway.clean(); - - try (Connection connection = dataSource.getConnection()) { - // Get info about which database type we are using - String dbType = getDbType(connection); - - // If this is Oracle, the only way to entirely clean the database - // is to also purge the "Recyclebin". 
See: - // http://docs.oracle.com/cd/B19306_01/server.102/b14200/statements_9018.htm - if (dbType.equals(DBMS_ORACLE)) { - PreparedStatement statement = null; - try { - statement = connection.prepareStatement("PURGE RECYCLEBIN"); - statement.executeQuery(); - } finally { - if (statement != null && !statement.isClosed()) { - statement.close(); - } - } - } - } } catch (FlywayException fe) { // If any FlywayException (Runtime) is thrown, change it to a SQLException throw new SQLException("Flyway clean error occurred", fe); @@ -1214,11 +1186,6 @@ public class DatabaseUtils { // We need to filter by schema in PostgreSQL schemaFilter = true; break; - case DBMS_ORACLE: - // Oracle specific query for a sequence owned by our current DSpace user - // NOTE: No need to filter by schema for Oracle, as Schema = User - sequenceSQL = "SELECT COUNT(1) FROM user_sequences WHERE sequence_name=?"; - break; case DBMS_H2: // In H2, sequences are listed in the "information_schema.sequences" table // SEE: http://www.h2database.com/html/grammar.html#information_schema @@ -1322,11 +1289,6 @@ public class DatabaseUtils { // For PostgreSQL, the default schema is named "public" // See: http://www.postgresql.org/docs/9.0/static/ddl-schemas.html schema = "public"; - } else if (dbType.equals(DBMS_ORACLE)) { - // For Oracle, default schema is actually the user account - // See: http://stackoverflow.com/a/13341390 - DatabaseMetaData meta = connection.getMetaData(); - schema = meta.getUserName(); } else { // For H2 (in memory), there is no such thing as a schema schema = null; @@ -1552,8 +1514,6 @@ public class DatabaseUtils { String dbms_lc = prodName.toLowerCase(Locale.ROOT); if (dbms_lc.contains("postgresql")) { return DBMS_POSTGRES; - } else if (dbms_lc.contains("oracle")) { - return DBMS_ORACLE; } else if (dbms_lc.contains("h2")) { // Used for unit testing only return DBMS_H2; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java index 842fc15e16..f0c4e4e179 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/MigrationUtils.java @@ -78,13 +78,6 @@ public class MigrationUtils { constraintName += "_" + StringUtils.lowerCase(constraintSuffix); cascade = true; break; - case "oracle": - // In Oracle, constraints are listed in the USER_CONS_COLUMNS table - constraintNameSQL = "SELECT CONSTRAINT_NAME " + - "FROM USER_CONS_COLUMNS " + - "WHERE TABLE_NAME = ? 
AND COLUMN_NAME = ?"; - cascade = true; - break; case "h2": // In H2, column constraints are listed in the "INFORMATION_SCHEMA.KEY_COLUMN_USAGE" table constraintNameSQL = "SELECT DISTINCT CONSTRAINT_NAME " + @@ -160,9 +153,6 @@ public class MigrationUtils { case "postgresql": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; - case "oracle": - dropTableSQL = "DROP TABLE " + tableName + " CASCADE CONSTRAINTS"; - break; case "h2": dropTableSQL = "DROP TABLE IF EXISTS " + tableName + " CASCADE"; break; @@ -208,9 +198,6 @@ public class MigrationUtils { case "postgresql": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; - case "oracle": - dropSequenceSQL = "DROP SEQUENCE " + sequenceName; - break; case "h2": dropSequenceSQL = "DROP SEQUENCE IF EXISTS " + sequenceName; break; @@ -256,9 +243,6 @@ public class MigrationUtils { case "postgresql": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; - case "oracle": - dropViewSQL = "DROP VIEW " + viewName + " CASCADE CONSTRAINTS"; - break; case "h2": dropViewSQL = "DROP VIEW IF EXISTS " + viewName + " CASCADE"; break; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java index 56c5b474d9..758e745ddc 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_3_9__Drop_constraint_for_DSpace_1_4_schema.java @@ -19,10 +19,9 @@ import org.flywaydb.core.api.migration.Context; * of the "community" table. This is necessary for the upgrade from 1.3 to 1.4 *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java index 6d82055e53..37100a17f9 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V1_5_9__Drop_constraint_for_DSpace_1_6_schema.java @@ -19,10 +19,9 @@ import org.flywaydb.core.api.migration.Context; * from 1.5 to 1.6 *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java index ea72d99b6e..8e2be91127 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/migration/V5_0_2014_09_25__DS_1582_Metadata_For_All_Objects_drop_constraint.java @@ -20,10 +20,9 @@ import org.flywaydb.core.api.migration.Context; * this column must be renamed to "resource_id". *

* This class was created because the names of database constraints differs based - * on the type of database (Postgres vs. Oracle vs. H2). As such, it becomes difficult + * on the type of database (Postgres vs. H2). As such, it becomes difficult * to write simple SQL which will work for multiple database types (especially - * since unit tests require H2 and the syntax for H2 is different from either - * Oracle or Postgres). + * since unit tests require H2 and the syntax for H2 is different from Postgres). *

* NOTE: This migration class is very simple because it is meant to be used * in conjuction with the corresponding SQL script: diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java index b3306a9fc9..0361e68053 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V5_0_2014_11_04__Enable_XMLWorkflow_Migration.java @@ -67,8 +67,6 @@ public class V5_0_2014_11_04__Enable_XMLWorkflow_Migration String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java index 9aa0f4877c..4c1cf33653 100644 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java +++ b/dspace-api/src/main/java/org/dspace/storage/rdbms/xmlworkflow/V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration.java @@ -46,8 +46,6 @@ public class V6_0_2015_09_01__DS_2701_Enable_XMLWorkflow_Migration extends BaseJ String dbFileLocation = null; if (dbtype.toLowerCase().contains("postgres")) { dbFileLocation = "postgres"; - } else if (dbtype.toLowerCase().contains("oracle")) { - dbFileLocation = "oracle"; } else if (dbtype.toLowerCase().contains("h2")) { dbFileLocation = "h2"; } diff --git a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql index 7548fa4c6a..edebe6e087 100644 --- a/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql +++ b/dspace-api/src/main/resources/org/dspace/storage/rdbms/flywayupgrade/postgres/upgradeToFlyway4x.sql @@ -15,7 +15,7 @@ -- ----------------- -- This is the PostgreSQL upgrade script from Flyway v4.2.0, copied/borrowed from: --- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/oracle/upgradeMetaDataTable.sql +-- https://github.com/flyway/flyway/blob/flyway-4.2.0/flyway-core/src/main/resources/org/flywaydb/core/internal/dbsupport/postgresql/upgradeMetaDataTable.sql -- -- The variables in this script are replaced in FlywayUpgradeUtils.upgradeFlywayTable() ------------------ From dac7ed2f09e0d8c9ae4931910944ef32deccc924 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Wed, 3 May 2023 14:47:16 -0500 Subject: [PATCH 040/125] Migrate to org.hibernate.type.TextType from our custom LobType --- .../org/dspace/authorize/ResourcePolicy.java | 2 +- .../org/dspace/content/MetadataValue.java | 2 +- .../java/org/dspace/orcid/OrcidHistory.java | 6 +- .../java/org/dspace/orcid/OrcidQueue.java | 4 +- .../main/java/org/dspace/scripts/Process.java | 2 +- .../rdbms/hibernate/DatabaseAwareLobType.java | 57 ------------------- 6 files changed, 8 insertions(+), 65 deletions(-) delete mode 100644 
dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java diff --git a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java index 38b6aef45b..c781400bae 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java +++ b/dspace-api/src/main/java/org/dspace/authorize/ResourcePolicy.java @@ -100,7 +100,7 @@ public class ResourcePolicy implements ReloadableEntity { private String rptype; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "rpdescription") private String rpdescription; diff --git a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java index c3deaacd80..31479e6206 100644 --- a/dspace-api/src/main/java/org/dspace/content/MetadataValue.java +++ b/dspace-api/src/main/java/org/dspace/content/MetadataValue.java @@ -60,7 +60,7 @@ public class MetadataValue implements ReloadableEntity { * The value of the field */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "text_value") private String value; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java index a567c6e7a7..07a79384c7 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidHistory.java @@ -80,7 +80,7 @@ public class OrcidHistory implements ReloadableEntity { * A description of the synchronized resource. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidHistory implements ReloadableEntity { * the owner itself. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "metadata") private String metadata; @@ -104,7 +104,7 @@ public class OrcidHistory implements ReloadableEntity { * The response message incoming from ORCID. */ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "response_message") private String responseMessage; diff --git a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java index 9261f14eea..65b66cd20c 100644 --- a/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java +++ b/dspace-api/src/main/java/org/dspace/orcid/OrcidQueue.java @@ -65,7 +65,7 @@ public class OrcidQueue implements ReloadableEntity { * A description of the resource to be synchronized. 
*/ @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "description") private String description; @@ -89,7 +89,7 @@ public class OrcidQueue implements ReloadableEntity { */ @Lob @Column(name = "metadata") - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") private String metadata; /** diff --git a/dspace-api/src/main/java/org/dspace/scripts/Process.java b/dspace-api/src/main/java/org/dspace/scripts/Process.java index 190d214a3c..eab3ba460c 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/Process.java +++ b/dspace-api/src/main/java/org/dspace/scripts/Process.java @@ -71,7 +71,7 @@ public class Process implements ReloadableEntity { private ProcessStatus processStatus; @Lob - @Type(type = "org.dspace.storage.rdbms.hibernate.DatabaseAwareLobType") + @Type(type = "org.hibernate.type.TextType") @Column(name = "parameters") private String parameters; diff --git a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java b/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java deleted file mode 100644 index 95939f9902..0000000000 --- a/dspace-api/src/main/java/org/dspace/storage/rdbms/hibernate/DatabaseAwareLobType.java +++ /dev/null @@ -1,57 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * http://www.dspace.org/license/ - */ -package org.dspace.storage.rdbms.hibernate; - -import org.apache.commons.lang.StringUtils; -import org.dspace.services.ConfigurationService; -import org.dspace.services.factory.DSpaceServicesFactory; -import org.hibernate.type.AbstractSingleColumnStandardBasicType; -import org.hibernate.type.descriptor.java.StringTypeDescriptor; -import org.hibernate.type.descriptor.sql.ClobTypeDescriptor; -import org.hibernate.type.descriptor.sql.LongVarcharTypeDescriptor; -import org.hibernate.type.descriptor.sql.SqlTypeDescriptor; - -/** - * A Hibernate @Type used to properly support the CLOB in both Postgres and Oracle. - * PostgreSQL doesn't have a CLOB type, instead it's a TEXT field. - * Normally, you'd use org.hibernate.type.TextType to support TEXT, but that won't work for Oracle. - * https://github.com/hibernate/hibernate-orm/blob/5.6/hibernate-core/src/main/java/org/hibernate/type/TextType.java - * - * This Type checks if we are using PostgreSQL. - * If so, it configures Hibernate to map CLOB to LongVarChar (same as org.hibernate.type.TextType) - * If not, it uses default CLOB (which works for other databases). 
- */ -public class DatabaseAwareLobType extends AbstractSingleColumnStandardBasicType { - - public static final DatabaseAwareLobType INSTANCE = new DatabaseAwareLobType(); - - public DatabaseAwareLobType() { - super( getDbDescriptor(), StringTypeDescriptor.INSTANCE ); - } - - public static SqlTypeDescriptor getDbDescriptor() { - if ( isPostgres() ) { - return LongVarcharTypeDescriptor.INSTANCE; - } else { - return ClobTypeDescriptor.DEFAULT; - } - } - - private static boolean isPostgres() { - ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService(); - String dbDialect = configurationService.getProperty("db.dialect"); - - return StringUtils.containsIgnoreCase(dbDialect, "PostgreSQL"); - } - - @Override - public String getName() { - return "database_aware_lob"; - } -} - From 66eb8a548fe55698cd53766fb86f605f23534323 Mon Sep 17 00:00:00 2001 From: Marie Verdonck Date: Thu, 4 May 2023 20:11:47 +0200 Subject: [PATCH 041/125] Browse-by support for controlled vocabularies https://github.com/DSpace/RestContract/pull/225 --- .../authority/ChoiceAuthorityServiceImpl.java | 54 ++++++++++++++ .../DSpaceControlledVocabularyIndex.java | 45 ++++++++++++ .../service/ChoiceAuthorityService.java | 4 + .../DiscoveryConfigurationService.java | 12 +++ .../rest/converter/BrowseIndexConverter.java | 6 +- .../HierarchicalBrowseConverter.java | 42 +++++++++++ .../rest/link/BrowseEntryHalLinkFactory.java | 4 +- .../app/rest/model/BrowseIndexRest.java | 73 +++++++++++++++---- .../model/hateoas/BrowseIndexResource.java | 34 ++++++++- .../repository/BrowseEntryLinkRepository.java | 5 +- .../repository/BrowseIndexRestRepository.java | 30 +++++++- .../repository/BrowseItemLinkRepository.java | 5 +- .../repository/VocabularyRestRepository.java | 2 +- 13 files changed, 286 insertions(+), 30 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java diff --git a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java index f25e2c4646..ec8f8769be 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/ChoiceAuthorityServiceImpl.java @@ -15,6 +15,7 @@ import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; +import java.util.stream.Collectors; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; @@ -30,6 +31,8 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Utils; import org.dspace.core.service.PluginService; +import org.dspace.discovery.configuration.DiscoveryConfigurationService; +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; @@ -80,6 +83,9 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected Map>> authoritiesFormDefinitions = new HashMap>>(); + // Map of vocabulary authorities to and their index info equivalent + protected Map vocabularyIndexMap = new HashMap<>(); + // the item submission reader private SubmissionConfigReader itemSubmissionConfigReader; 
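For orientation on the REST contract change referenced above (DSpace/RestContract pull request 225), a hierarchical browse index exposed by this feature might be serialized roughly as sketched below. The property names (browseType, facetType, vocabulary, metadata) come from the BrowseIndexRest changes later in this patch, and the "vocabulary" link mirrors the one added in BrowseIndexResource; the vocabulary name "srsc", the facet name "subject", the metadata field and the host are illustrative placeholders only, not values defined by this change set.

    {
      "id" : "srsc",
      "browseType" : "hierarchicalBrowse",
      "facetType" : "subject",
      "vocabulary" : "srsc",
      "metadata" : [ "dc.subject" ],
      "_links" : {
        "vocabulary" : { "href" : "https://demo.dspace.org/server/api/submission/vocabularies/srsc" },
        "self" : { "href" : "https://demo.dspace.org/server/api/discover/browses/srsc" }
      }
    }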
@@ -87,6 +93,8 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService protected ConfigurationService configurationService; @Autowired(required = true) protected PluginService pluginService; + @Autowired + private DiscoveryConfigurationService searchConfigurationService; final static String CHOICES_PLUGIN_PREFIX = "choices.plugin."; final static String CHOICES_PRESENTATION_PREFIX = "choices.presentation."; @@ -540,4 +548,50 @@ public final class ChoiceAuthorityServiceImpl implements ChoiceAuthorityService HierarchicalAuthority ma = (HierarchicalAuthority) getChoiceAuthorityByAuthorityName(authorityName); return ma.getParentChoice(authorityName, vocabularyId, locale); } + + @Override + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab) { + if (this.vocabularyIndexMap.containsKey(nameVocab)) { + return this.vocabularyIndexMap.get(nameVocab); + } else { + init(); + ChoiceAuthority source = this.getChoiceAuthorityByAuthorityName(nameVocab); + if (source != null && source instanceof DSpaceControlledVocabulary) { + Set metadataFields = new HashSet<>(); + Map> formsToFields = this.authoritiesFormDefinitions.get(nameVocab); + for (Map.Entry> formToField : formsToFields.entrySet()) { + metadataFields.addAll(formToField.getValue().stream().map(value -> + StringUtils.replace(value, "_", ".")) + .collect(Collectors.toList())); + } + DiscoverySearchFilterFacet matchingFacet = null; + for (DiscoverySearchFilterFacet facetConfig : searchConfigurationService.getAllFacetsConfig()) { + boolean coversAllFieldsFromVocab = true; + for (String fieldFromVocab: metadataFields) { + boolean coversFieldFromVocab = false; + for (String facetMdField: facetConfig.getMetadataFields()) { + if (facetMdField.startsWith(fieldFromVocab)) { + coversFieldFromVocab = true; + break; + } + } + if (!coversFieldFromVocab) { + coversAllFieldsFromVocab = false; + break; + } + } + if (coversAllFieldsFromVocab) { + matchingFacet = facetConfig; + break; + } + } + DSpaceControlledVocabularyIndex vocabularyIndex = + new DSpaceControlledVocabularyIndex((DSpaceControlledVocabulary) source, metadataFields, + matchingFacet); + this.vocabularyIndexMap.put(nameVocab, vocabularyIndex); + return vocabularyIndex; + } + return null; + } + } } diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java new file mode 100644 index 0000000000..6f350fc71e --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -0,0 +1,45 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.content.authority; + +import java.util.Set; + +import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; + +/** + * Helper class to transform a {@link org.dspace.content.authority.DSpaceControlledVocabulary} into a + * {@code BrowseIndexRest} + * cached by {@link org.dspace.content.authority.service.ChoiceAuthorityService#getVocabularyIndex(String)} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +public class DSpaceControlledVocabularyIndex { + + protected DSpaceControlledVocabulary vocabulary; + protected Set metadataFields; + protected DiscoverySearchFilterFacet facetConfig; + + public 
DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, + DiscoverySearchFilterFacet facetConfig) { + this.vocabulary = controlledVocabulary; + this.metadataFields = metadataFields; + this.facetConfig = facetConfig; + } + + public DSpaceControlledVocabulary getVocabulary() { + return vocabulary; + } + + public Set getMetadataFields() { + return this.metadataFields; + } + + public DiscoverySearchFilterFacet getFacetConfig() { + return this.facetConfig; + } +} diff --git a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java index eb34de29c1..a9fd24e947 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/service/ChoiceAuthorityService.java @@ -15,6 +15,7 @@ import org.dspace.content.MetadataValue; import org.dspace.content.authority.Choice; import org.dspace.content.authority.ChoiceAuthority; import org.dspace.content.authority.Choices; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; /** * Broker for ChoiceAuthority plugins, and for other information configured @@ -220,4 +221,7 @@ public interface ChoiceAuthorityService { * @return the parent Choice object if any */ public Choice getParentChoice(String authorityName, String vocabularyId, String locale); + + public DSpaceControlledVocabularyIndex getVocabularyIndex(String nameVocab); + } diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index 636e7ccd2a..f4fd3ca0ef 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -92,6 +92,18 @@ public class DiscoveryConfigurationService { return configs; } + /** + * @return All configurations for {@link org.dspace.discovery.configuration.DiscoverySearchFilterFacet} + */ + public List getAllFacetsConfig() { + List configs = new ArrayList<>(); + for (String key : map.keySet()) { + DiscoveryConfiguration config = map.get(key); + configs.addAll(config.getSidebarFacets()); + } + return configs; + } + public static void main(String[] args) { System.out.println(DSpaceServicesFactory.getInstance().getServiceManager().getServicesNames().size()); DiscoveryConfigurationService mainService = DSpaceServicesFactory.getInstance().getServiceManager() diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 6ee836e5fc..1e2899b396 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -7,6 +7,9 @@ */ package org.dspace.app.rest.converter; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; + import java.util.ArrayList; import java.util.List; @@ -33,14 +36,15 @@ public class BrowseIndexConverter implements DSpaceConverter metadataList = new ArrayList(); if (obj.isMetadataIndex()) { for (String s : 
obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setBrowseType(BROWSE_TYPE_FLAT); } bir.setMetadataList(metadataList); diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java new file mode 100644 index 0000000000..7b0cea9d8f --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java @@ -0,0 +1,42 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.converter; + +import java.util.ArrayList; + +import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.projection.Projection; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.springframework.stereotype.Component; + +/** + * This is the converter from a {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} to a + * {@link org.dspace.app.rest.model.BrowseIndexRest#BROWSE_TYPE_HIERARCHICAL} {@link org.dspace.app.rest.model.BrowseIndexRest} + * + * @author Marie Verdonck (Atmire) on 04/05/2023 + */ +@Component +public class HierarchicalBrowseConverter implements DSpaceConverter { + + @Override + public BrowseIndexRest convert(DSpaceControlledVocabularyIndex obj, Projection projection) { + BrowseIndexRest bir = new BrowseIndexRest(); + bir.setProjection(projection); + bir.setId(obj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL); + bir.setFacetType(obj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(obj.getVocabulary().getPluginInstanceName()); + bir.setMetadataList(new ArrayList<>(obj.getMetadataFields())); + return bir; + } + + @Override + public Class getModelClass() { + return DSpaceControlledVocabularyIndex.class; + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java index ee70dbf431..9e515984fe 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/link/BrowseEntryHalLinkFactory.java @@ -37,11 +37,11 @@ public class BrowseEntryHalLinkFactory extends HalLinkFactory { public static final String CATEGORY = RestAddressableModel.DISCOVER; - public static final String ITEMS = "items"; - public static final String ENTRIES = "entries"; + public static final String LINK_ITEMS = "items"; + public static final String LINK_ENTRIES = "entries"; + public static final String LINK_VOCABULARY = "vocabulary"; - boolean metadataBrowse; + // if the browse index has two levels, the 1st level shows the list of entries like author names, subjects, types, + // etc. the second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_VALUE_LIST = "valueList"; + // if the browse index has one level: the full list of items + public static final String BROWSE_TYPE_FLAT = "flatBrowse"; + // if the browse index should display the vocabulary tree. The 1st level shows the tree. 
+ // The second level is the actual list of items linked to a specific entry + public static final String BROWSE_TYPE_HIERARCHICAL = "hierarchicalBrowse"; + // Shared fields + String browseType; @JsonProperty(value = "metadata") List metadataList; + // Single browse index fields + @JsonInclude(JsonInclude.Include.NON_NULL) String dataType; - + @JsonInclude(JsonInclude.Include.NON_NULL) List sortOptions; - + @JsonInclude(JsonInclude.Include.NON_NULL) String order; + // Hierarchical browse fields + @JsonInclude(JsonInclude.Include.NON_NULL) + String facetType; + @JsonInclude(JsonInclude.Include.NON_NULL) + String vocabulary; + @JsonIgnore @Override public String getCategory() { @@ -60,14 +79,6 @@ public class BrowseIndexRest extends BaseObjectRest { return NAME; } - public boolean isMetadataBrowse() { - return metadataBrowse; - } - - public void setMetadataBrowse(boolean metadataBrowse) { - this.metadataBrowse = metadataBrowse; - } - public List getMetadataList() { return metadataList; } @@ -100,6 +111,38 @@ public class BrowseIndexRest extends BaseObjectRest { this.sortOptions = sortOptions; } + /** + * - valueList => if the browse index has two levels, the 1st level shows the list of entries like author names, + * subjects, types, etc. the second level is the actual list of items linked to a specific entry + * - flatBrowse if the browse index has one level: the full list of items + * - hierarchicalBrowse if the browse index should display the vocabulary tree. The 1st level shows the tree. + * The second level is the actual list of items linked to a specific entry + */ + public void setBrowseType(String browseType) { + this.browseType = browseType; + } + + public String getBrowseType() { + return browseType; + } + + public void setFacetType(String facetType) { + this.facetType = facetType; + } + + public String getFacetType() { + return facetType; + } + + public void setVocabulary(String vocabulary) { + this.vocabulary = vocabulary; + } + + + public String getVocabulary() { + return vocabulary; + } + @Override public Class getController() { return RestResourceController.class; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java index f6c821595f..61158704ea 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/hateoas/BrowseIndexResource.java @@ -7,9 +7,20 @@ */ package org.dspace.app.rest.model.hateoas; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.linkTo; +import static org.springframework.hateoas.server.mvc.WebMvcLinkBuilder.methodOn; + +import org.atteo.evo.inflector.English; +import org.dspace.app.rest.RestResourceController; import org.dspace.app.rest.model.BrowseIndexRest; +import org.dspace.app.rest.model.VocabularyRest; import org.dspace.app.rest.model.hateoas.annotations.RelNameDSpaceResource; import org.dspace.app.rest.utils.Utils; +import org.dspace.content.authority.ChoiceAuthority; +import org.dspace.content.authority.factory.ContentAuthorityServiceFactory; +import org.dspace.content.authority.service.ChoiceAuthorityService; +import org.springframework.hateoas.Link; +import org.springframework.web.util.UriComponentsBuilder; /** * Browse Index Rest HAL Resource. 
The HAL Resource wraps the REST Resource @@ -19,15 +30,32 @@ import org.dspace.app.rest.utils.Utils; */ @RelNameDSpaceResource(BrowseIndexRest.NAME) public class BrowseIndexResource extends DSpaceResource { + + public BrowseIndexResource(BrowseIndexRest bix, Utils utils) { super(bix, utils); // TODO: the following code will force the embedding of items and // entries in the browseIndex we need to find a way to populate the rels // array from the request/projection right now it is always null // super(bix, utils, "items", "entries"); - if (bix.isMetadataBrowse()) { - add(utils.linkToSubResource(bix, BrowseIndexRest.ENTRIES)); + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ENTRIES)); + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT)) { + add(utils.linkToSubResource(bix, BrowseIndexRest.LINK_ITEMS)); + } + if (bix.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL)) { + ChoiceAuthorityService choiceAuthorityService = + ContentAuthorityServiceFactory.getInstance().getChoiceAuthorityService(); + ChoiceAuthority source = choiceAuthorityService.getChoiceAuthorityByAuthorityName(bix.getVocabulary()); + UriComponentsBuilder baseLink = linkTo( + methodOn(RestResourceController.class, VocabularyRest.AUTHENTICATION).findRel(null, + null, VocabularyRest.CATEGORY, + English.plural(VocabularyRest.NAME), source.getPluginInstanceName(), + "", null, null)).toUriComponentsBuilder(); + + add(Link.of(baseLink.build().encode().toUriString(), BrowseIndexRest.LINK_VOCABULARY)); } - add(utils.linkToSubResource(bix, BrowseIndexRest.ITEMS)); } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java index 93224f78cd..f608595c3d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseEntryLinkRepository.java @@ -40,7 +40,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.ENTRIES) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." 
+ BrowseIndexRest.LINK_ENTRIES) public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -127,7 +127,8 @@ public class BrowseEntryLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (bir.isMetadataBrowse() && "entries".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_VALUE_LIST) && + name.equals(BrowseIndexRest.LINK_ENTRIES)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index 01277ff29b..c87cbc6c03 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -13,7 +13,10 @@ import java.util.List; import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.browse.BrowseException; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; +import org.dspace.content.authority.service.ChoiceAuthorityService; import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.security.access.prepost.PreAuthorize; @@ -27,20 +30,39 @@ import org.springframework.stereotype.Component; @Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME) public class BrowseIndexRestRepository extends DSpaceRestRepository { + @Autowired + private ChoiceAuthorityService choiceAuthorityService; + @Override @PreAuthorize("permitAll()") public BrowseIndexRest findOne(Context context, String name) { - BrowseIndexRest bi = null; + BrowseIndexRest bi = createFromMatchingBrowseIndex(name); + if (bi == null) { + bi = createFromMatchingVocabulary(name); + } + + return bi; + } + + private BrowseIndexRest createFromMatchingVocabulary(String name) { + DSpaceControlledVocabularyIndex vocabularyIndex = choiceAuthorityService.getVocabularyIndex(name); + if (vocabularyIndex != null) { + return converter.toRest(vocabularyIndex, utils.obtainProjection()); + } + return null; + } + + private BrowseIndexRest createFromMatchingBrowseIndex(String name) { BrowseIndex bix; try { - bix = BrowseIndex.getBrowseIndex(name); + bix = BrowseIndex.getBrowseIndex(name); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); } if (bix != null) { - bi = converter.toRest(bix, utils.obtainProjection()); + return converter.toRest(bix, utils.obtainProjection()); } - return bi; + return null; } @Override diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java index 74aa9f38bf..baa79bc80a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseItemLinkRepository.java @@ -42,7 +42,7 @@ import org.springframework.stereotype.Component; * * @author Andrea Bollini (andrea.bollini at 4science.it) */ -@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." 
+ BrowseIndexRest.ITEMS) +@Component(BrowseIndexRest.CATEGORY + "." + BrowseIndexRest.NAME + "." + BrowseIndexRest.LINK_ITEMS) public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository implements LinkRestRepository { @@ -155,7 +155,8 @@ public class BrowseItemLinkRepository extends AbstractDSpaceRestRepository @Override public boolean isEmbeddableRelation(Object data, String name) { BrowseIndexRest bir = (BrowseIndexRest) data; - if (!bir.isMetadataBrowse() && "items".equals(name)) { + if (bir.getBrowseType().equals(BrowseIndexRest.BROWSE_TYPE_FLAT) && + name.equals(BrowseIndexRest.LINK_ITEMS)) { return true; } return false; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java index dcdf71186b..fcc37d1316 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/VocabularyRestRepository.java @@ -53,7 +53,7 @@ public class VocabularyRestRepository extends DSpaceRestRepository Date: Fri, 5 May 2023 12:35:40 +0200 Subject: [PATCH 042/125] 94299: Remove bitstreams in bulk via patch --- .../src/main/resources/Messages.properties | 2 + .../rest/BitstreamCategoryRestController.java | 63 ++++++++++++++++++ .../DSpaceApiExceptionControllerAdvice.java | 1 + .../RESTBitstreamNotFoundException.java | 51 +++++++++++++++ .../repository/BitstreamRestRepository.java | 19 ++++++ .../operation/BitstreamRemoveOperation.java | 65 +++++++++++++++++++ 6 files changed, 201 insertions(+) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java diff --git a/dspace-api/src/main/resources/Messages.properties b/dspace-api/src/main/resources/Messages.properties index b537819c06..78e2774013 100644 --- a/dspace-api/src/main/resources/Messages.properties +++ b/dspace-api/src/main/resources/Messages.properties @@ -120,3 +120,5 @@ org.dspace.app.rest.exception.RESTEmptyWorkflowGroupException.message = Refused org.dspace.app.rest.exception.EPersonNameNotProvidedException.message = The eperson.firstname and eperson.lastname values need to be filled in org.dspace.app.rest.exception.GroupNameNotProvidedException.message = Cannot create group, no group name is provided org.dspace.app.rest.exception.GroupHasPendingWorkflowTasksException.message = Cannot delete group, the associated workflow role still has pending tasks +org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in \ + the repository diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java new file mode 100644 index 0000000000..13929e5a9a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -0,0 +1,63 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + 
*/ +package org.dspace.app.rest; + +import static org.dspace.app.rest.utils.ContextUtil.obtainContext; + +import java.sql.SQLException; +import javax.servlet.http.HttpServletRequest; + +import com.fasterxml.jackson.databind.JsonNode; +import org.dspace.app.rest.model.BitstreamRest; +import org.dspace.app.rest.repository.BitstreamRestRepository; +import org.dspace.authorize.AuthorizeException; +import org.dspace.core.Context; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.hateoas.RepresentationModel; +import org.springframework.http.ResponseEntity; +import org.springframework.security.access.prepost.PreAuthorize; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.bind.annotation.RestController; + +/** + * REST controller for handling bulk updates to Bitstream resources. + *

+ * This controller is responsible for handling requests to the bitstream category, which allows for updating + * multiple bitstream resources in a single operation. + *

+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@RestController +@RequestMapping("/api/" + BitstreamRest.CATEGORY + "/" + BitstreamRest.PLURAL_NAME) +public class BitstreamCategoryRestController { + @Autowired + BitstreamRestRepository bitstreamRestRepository; + + /** + * Handles PATCH requests to the bitstream category for bulk updates of bitstream resources. + * + * @param request the HTTP request object. + * @param jsonNode the JSON representation of the bulk update operation, containing the updates to be applied. + * @return a ResponseEntity representing the HTTP response to be sent back to the client, in this case, a + * HTTP 204 No Content response since currently only a delete operation is supported. + * @throws SQLException if an error occurs while accessing the database. + * @throws AuthorizeException if the user is not authorized to perform the requested operation. + */ + @PreAuthorize("hasAuthority('ADMIN')") + @RequestMapping(method = RequestMethod.PATCH) + public ResponseEntity> patch(HttpServletRequest request, + @RequestBody(required = true) JsonNode jsonNode) + throws SQLException, AuthorizeException { + Context context = obtainContext(request); + bitstreamRestRepository.patchBitstreamsInBulk(context, jsonNode); + return ResponseEntity.noContent().build(); + } +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java index 6ded477813..3f55536666 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/DSpaceApiExceptionControllerAdvice.java @@ -163,6 +163,7 @@ public class DSpaceApiExceptionControllerAdvice extends ResponseEntityExceptionH EPersonNameNotProvidedException.class, GroupNameNotProvidedException.class, GroupHasPendingWorkflowTasksException.class, + RESTBitstreamNotFoundException.class }) protected void handleCustomUnprocessableEntityException(HttpServletRequest request, HttpServletResponse response, TranslatableException ex) throws IOException { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java new file mode 100644 index 0000000000..a0b48e3c0d --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/exception/RESTBitstreamNotFoundException.java @@ -0,0 +1,51 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.exception; + +import java.text.MessageFormat; + +import org.dspace.core.Context; +import org.dspace.core.I18nUtil; + +/** + *
Extends {@link UnprocessableEntityException} to provide a specific error message + * in the REST response. The error message is added to the response in + * {@link DSpaceApiExceptionControllerAdvice#handleCustomUnprocessableEntityException}, + * hence it should not contain sensitive or security-compromising info.
+ * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +public class RESTBitstreamNotFoundException extends UnprocessableEntityException implements TranslatableException { + + public static String uuid; + + /** + * @param formatStr string with placeholders, ideally obtained using {@link I18nUtil} + * @return message with bitstream id substituted + */ + private static String formatMessage(String formatStr) { + MessageFormat fmt = new MessageFormat(formatStr); + return fmt.format(new String[]{uuid}); + } + + public static final String MESSAGE_KEY = "org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message"; + + public RESTBitstreamNotFoundException(String uuid) { + super(formatMessage(I18nUtil.getMessage(MESSAGE_KEY))); + RESTBitstreamNotFoundException.uuid = uuid; + } + + public String getMessageKey() { + return MESSAGE_KEY; + } + + public String getLocalizedMessage(Context context) { + return formatMessage(I18nUtil.getMessage(MESSAGE_KEY, context)); + } + +} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 3696b38668..8ef06ecbad 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -17,9 +17,12 @@ import java.util.List; import java.util.UUID; import javax.servlet.http.HttpServletRequest; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.lang3.StringUtils; import org.dspace.app.rest.Parameter; import org.dspace.app.rest.SearchRestMethod; +import org.dspace.app.rest.converter.JsonPatchConverter; import org.dspace.app.rest.exception.DSpaceBadRequestException; import org.dspace.app.rest.exception.RepositoryMethodNotImplementedException; import org.dspace.app.rest.exception.UnprocessableEntityException; @@ -292,4 +295,20 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository + * curl -X PATCH http://${dspace.server.url}/api/core/bitstreams -H "Content-Type: application/json" + * -d '[ + * {"op": "remove", "path": "/bitstreams/${bitstream1UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream2UUID}"}, + * {"op": "remove", "path": "/bitstreams/${bitstream3UUID}"} + * ]' + * + * + * @author Jens Vannerum (jens.vannerum@atmire.com) + */ +@Component +public class BitstreamRemoveOperation extends PatchOperation { + @Autowired + BitstreamService bitstreamService; + private static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + + @Override + public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { + String bitstreamIDtoDelete = operation.getPath().replace(OPERATION_PATH_BITSTREAM_REMOVE, ""); + Bitstream bitstreamToDelete = bitstreamService.find(context, UUID.fromString(bitstreamIDtoDelete)); + if (bitstreamToDelete == null) { + throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); + } + + try { + bitstreamService.delete(context, bitstreamToDelete); + bitstreamService.update(context, bitstreamToDelete); + } catch (AuthorizeException | IOException e) { + throw new RuntimeException(e.getMessage(), e); + } + return null; + } + + @Override + public boolean supports(Object objectToMatch, Operation operation) { + return objectToMatch == null && 
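
For reference, the MESSAGE_KEY above must resolve to a pattern with a {0} placeholder, which MessageFormat fills with the offending bitstream UUID. A minimal sketch of the corresponding message-catalog entry, with the wording inferred from the error message asserted in the integration test added later in this series (the exact catalog file and phrasing are assumptions, not part of this patch):

    org.dspace.app.rest.exception.RESTBitstreamNotFoundException.message = Bitstream with uuid {0} could not be found in the repository
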
operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && + operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); + } +} From 09b56c2d99b770d376a3e49cb7f01b3ca0a4f5eb Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 5 May 2023 13:05:34 +0200 Subject: [PATCH 043/125] 94299: Configurable limit on amount of patch operations --- .../app/rest/repository/BitstreamRestRepository.java | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 8ef06ecbad..586525bbd2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -43,6 +43,7 @@ import org.dspace.content.service.CollectionService; import org.dspace.content.service.CommunityService; import org.dspace.core.Context; import org.dspace.handle.service.HandleService; +import org.dspace.services.ConfigurationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; @@ -77,6 +78,9 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository operationsLimit) { + throw new DSpaceBadRequestException("The number of operations in the patch is over the limit of " + + operationsLimit); + } resourcePatch.patch(obtainContext(), null, patch.getOperations()); context.commit(); } From 80706592aae50681dde850391770e6fe09ee6eca Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 5 May 2023 13:07:47 +0200 Subject: [PATCH 044/125] Revert "94299 Multiple Bitstream deletion endpoint" This reverts commit 51d8874a --- .../app/rest/RestResourceController.java | 33 - .../repository/BitstreamRestRepository.java | 44 - .../rest/repository/DSpaceRestRepository.java | 18 - .../app/rest/BitstreamRestRepositoryIT.java | 955 ------------------ 4 files changed, 1050 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java index 24468660f0..b82b483075 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/RestResourceController.java @@ -7,7 +7,6 @@ */ package org.dspace.app.rest; -import static org.dspace.app.rest.utils.ContextUtil.obtainContext; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_DIGIT; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_HEX32; import static org.dspace.app.rest.utils.RegexUtils.REGEX_REQUESTMAPPING_IDENTIFIER_AS_STRING_VERSION_STRONG; @@ -56,8 +55,6 @@ import org.dspace.app.rest.repository.LinkRestRepository; import org.dspace.app.rest.utils.RestRepositoryUtils; import org.dspace.app.rest.utils.Utils; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; -import org.dspace.core.Context; import org.dspace.util.UUIDUtils; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; @@ -1053,13 +1050,6 @@ public class RestResourceController implements InitializingBean { return deleteInternal(apiCategory, model, uuid); } - @RequestMapping(method = 
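
Note on the configurable limit introduced above: the repository reads an operations limit through ConfigurationService and rejects an oversized patch with a DSpaceBadRequestException (HTTP 400) before any operation is applied. A minimal configuration sketch; the property name and default value shown here are assumptions, since the getIntProperty call is not fully visible in this hunk:

    # rest.cfg (assumed): maximum number of operations accepted in a single PATCH request
    rest.patch.operations.limit = 1000
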
RequestMethod.DELETE, consumes = {"text/uri-list"}) - public ResponseEntity> delete(HttpServletRequest request, @PathVariable String apiCategory, - @PathVariable String model) - throws HttpRequestMethodNotSupportedException { - return deleteUriListInternal(request, apiCategory, model); - } - /** * Internal method to delete resource. * @@ -1077,29 +1067,6 @@ public class RestResourceController implements InitializingBean { return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); } - public ResponseEntity> deleteUriListInternal( - HttpServletRequest request, - String apiCategory, - String model) - throws HttpRequestMethodNotSupportedException { - checkModelPluralForm(apiCategory, model); - DSpaceRestRepository repository = utils.getResourceRepository(apiCategory, model); - Context context = obtainContext(request); - List dsoStringList = utils.getStringListFromRequest(request); - List dsoList = utils.constructDSpaceObjectList(context, dsoStringList); - if (dsoStringList.size() != dsoList.size()) { - throw new ResourceNotFoundException("One or more bitstreams could not be found."); - } - try { - repository.delete(dsoList); - } catch (ClassCastException e) { - log.error("Something went wrong whilst creating the object for apiCategory: " + apiCategory + - " and model: " + model, e); - return ControllerUtils.toEmptyResponse(HttpStatus.INTERNAL_SERVER_ERROR); - } - return ControllerUtils.toEmptyResponse(HttpStatus.NO_CONTENT); - } - /** * Execute a PUT request for an entity with id of type UUID; * diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 586525bbd2..454b6f8453 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -10,8 +10,6 @@ package org.dspace.app.rest.repository; import java.io.IOException; import java.io.InputStream; import java.sql.SQLException; -import java.util.ArrayList; -import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -156,48 +154,6 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository dsoList) - throws SQLException, AuthorizeException { - // check if list is empty - if (dsoList.isEmpty()) { - throw new ResourceNotFoundException("No bitstreams given."); - } - // check if every DSO is a Bitstream - if (dsoList.stream().anyMatch(dso -> !(dso instanceof Bitstream))) { - throw new UnprocessableEntityException("Not all given items are bitstreams."); - } - // check that they're all part of the same Item - List parents = new ArrayList<>(); - for (DSpaceObject dso : dsoList) { - Bitstream bit = bs.find(context, dso.getID()); - DSpaceObject bitstreamParent = bs.getParentObject(context, bit); - if (bit == null) { - throw new ResourceNotFoundException("The bitstream with uuid " + dso.getID() + " could not be found"); - } - // we have to check if the bitstream has already been deleted - if (bit.isDeleted()) { - throw new UnprocessableEntityException("The bitstream with uuid " + bit.getID() - + " was already deleted"); - } else { - parents.add(bitstreamParent); - } - } - if (parents.stream().distinct().count() > 1) { - throw new UnprocessableEntityException("Not all given items are part of the same Item."); - } - // delete all Bitstreams - Iterator iterator = dsoList.iterator(); - while 
(iterator.hasNext()) { - Bitstream bit = (Bitstream) iterator.next(); - try { - bs.delete(context, bit); - } catch (SQLException | IOException e) { - throw new RuntimeException(e.getMessage(), e); - } - } - } - /** * Find the bitstream for the provided handle and sequence or filename. * When a bitstream can be found with the sequence ID it will be returned if the user has "METADATA_READ" access. diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 219b7c4123..01f127eca5 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -26,7 +26,6 @@ import org.dspace.app.rest.model.ItemRest; import org.dspace.app.rest.model.RestAddressableModel; import org.dspace.app.rest.model.patch.Patch; import org.dspace.authorize.AuthorizeException; -import org.dspace.content.DSpaceObject; import org.dspace.content.service.MetadataFieldService; import org.dspace.core.Context; import org.springframework.beans.factory.BeanNameAware; @@ -257,23 +256,6 @@ public abstract class DSpaceRestRepository dsoList) { - Context context = obtainContext(); - try { - getThisRepository().deleteList(context, dsoList); - context.commit(); - } catch (AuthorizeException e) { - throw new RESTAuthorizationException(e); - } catch (SQLException ex) { - throw new RuntimeException(ex.getMessage(), ex); - } - } - - protected void deleteList(Context context, List list) - throws AuthorizeException, SQLException, RepositoryMethodNotImplementedException { - throw new RepositoryMethodNotImplementedException("No implementation found; Method not allowed!", ""); - } - @Override /** * This method cannot be implemented we required all the find method to be paginated diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 391d9e4193..f9c1e469fc 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -13,7 +13,6 @@ import static org.dspace.core.Constants.WRITE; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -import static org.springframework.data.rest.webmvc.RestMediaTypes.TEXT_URI_LIST; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; @@ -1202,960 +1201,6 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest .andExpect(status().isNotFound()); } - @Test - public void deleteListOneBitstream() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. 
- parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - String bitstreamContent = "ThisIsSomeDummyText"; - //Add a bitstream to an item - Bitstream bitstream = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - bitstream = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream") - .withDescription("Description") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete - getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListOneOfMultipleBitstreams() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete bitstream1 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for bitstream1 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isNotFound()); - - // check that bitstream2 still exists - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())); - - // check that bitstream3 still exists - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()) - .andExpect(jsonPath("$", HalMatcher.matchNoEmbeds())) - ; - } - - @Test - public void deleteListAllBitstreams() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for bitstream1 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for bitstream2 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for bitstream3 - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListForbidden() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(eperson.getEmail(), password); - - // Delete using an unauthorized user - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isForbidden()); - - // Verify the bitstreams are still here - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListUnauthorized() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - // Delete as anonymous - getClient().perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isUnauthorized()); - - // Verify the bitstreams are still here - getClient().perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient().perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListEmpty() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with empty list throws 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListNotBitstream() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with list containing non-Bitstream throws 422 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() - + " \n http://localhost:8080/server/api/core/items/" + publicItem1.getID())) - .andExpect(status().is(422)); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListDifferentItems() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. Two public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 1 bitstream to each item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. 
- createBitstream(context, publicItem2, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete with list containing Bitstreams from different items throws 422 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().is(422)); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - } - - @Test - public void deleteListLogo() throws Exception { - // We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - // ** GIVEN ** - // 1. A community with a logo - parentCommunity = CommunityBuilder.createCommunity(context).withName("Community").withLogo("logo_community") - .build(); - - // 2. A collection with a logo - Collection col = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection") - .withLogo("logo_collection").build(); - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // trying to DELETE parentCommunity logo and collection logo should work - // we have to delete them separately otherwise it will throw 422 as they belong to different items - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + parentCommunity.getLogo().getID())) - .andExpect(status().is(204)); - - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + col.getLogo().getID())) - .andExpect(status().is(204)); - - // Verify 404 after delete for parentCommunity logo - getClient(token).perform(get("/api/core/bitstreams/" + parentCommunity.getLogo().getID())) - .andExpect(status().isNotFound()); - - // Verify 404 after delete for collection logo - getClient(token).perform(get("/api/core/bitstreams/" + col.getLogo().getID())) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListMissing() throws Exception { - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify 404 after failed delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - } - - @Test - public void deleteListOneMissing() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. 
- parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams and a missing bitstream returns 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().isOk()); - } - - @Test - public void deleteListOneMissingDifferentItems() throws Exception { - - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. 
Two public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - Item publicItem2 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 1 bitstream to each item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem2, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete all bitstreams and a missing bitstream returns 404 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/1c11f3f1-ba1f-4f36-908a-3f1ea9a557eb")) - .andExpect(status().isNotFound()); - - // Verify the bitstreams are still here - getClient(token).perform(get("/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().isOk()); - - getClient(token).perform(get("/api/core/bitstreams/" + bitstream2.getID())) - .andExpect(status().isOk()); - - } - - @Test - public void deleteListDeleted() throws Exception { - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - String bitstreamContent = "ThisIsSomeDummyText"; - //Add a bitstream to an item - Bitstream bitstream = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { - bitstream = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream") - .withDescription("Description") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(204)); - - // Verify 404 when trying to delete a non-existing, already deleted, bitstream - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream.getID())) - .andExpect(status().is(422)); - } - - @Test - public void deleteListOneDeleted() throws Exception { - //We turn off the authorization system in order to create the structure as defined below - context.turnOffAuthorisationSystem(); - - //** GIVEN ** - //1. A community-collection structure with one parent community with sub-community and one collection. - parentCommunity = CommunityBuilder.createCommunity(context) - .withName("Parent Community") - .build(); - Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) - .withName("Sub Community") - .build(); - Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); - - //2. One public items that is readable by Anonymous - Item publicItem1 = ItemBuilder.createItem(context, col1) - .withTitle("Test") - .withIssueDate("2010-10-17") - .withAuthor("Smith, Donald") - .withSubject("ExtraEntry") - .build(); - - // Add 3 bitstreams to the item - String bitstreamContent1 = "ThisIsSomeDummyText1"; - Bitstream bitstream1 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent1, CharEncoding.UTF_8)) { - bitstream1 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream1") - .withDescription("Description1") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent2 = "ThisIsSomeDummyText2"; - Bitstream bitstream2 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent2, CharEncoding.UTF_8)) { - bitstream2 = BitstreamBuilder. - createBitstream(context, publicItem1, is) - .withName("Bitstream2") - .withDescription("Description2") - .withMimeType("text/plain") - .build(); - } - - String bitstreamContent3 = "ThisIsSomeDummyText3"; - Bitstream bitstream3 = null; - try (InputStream is = IOUtils.toInputStream(bitstreamContent3, CharEncoding.UTF_8)) { - bitstream3 = BitstreamBuilder. 
- createBitstream(context, publicItem1, is) - .withName("Bitstream3") - .withDescription("Description3") - .withMimeType("text/plain") - .build(); - } - - context.restoreAuthSystemState(); - - String token = getAuthToken(admin.getEmail(), password); - - // Delete bitstream1 - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID())) - .andExpect(status().is(204)); - - // Verify 404 when trying to delete a non-existing, already deleted, bitstream - getClient(token).perform(delete("/api/core/bitstreams") - .contentType(TEXT_URI_LIST) - .content("http://localhost:8080/server/api/core/bitstreams/" + bitstream1.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream2.getID() - + " \n http://localhost:8080/server/api/core/bitstreams/" + bitstream3.getID())) - .andExpect(status().is(422)); - } - @Test public void patchBitstreamMetadataAuthorized() throws Exception { runPatchMetadataTests(admin, 200); From 648b27befbe992515319a79d78d01b8327c338d4 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 14:34:53 +0200 Subject: [PATCH 045/125] 101549: Make BrowseIndexRestRepository#findAll also return hierarchicalBrowses --- .../java/org/dspace/browse/BrowseIndex.java | 8 ++-- .../DSpaceControlledVocabularyIndex.java | 4 +- .../rest/converter/BrowseIndexConverter.java | 24 ++++++++--- .../HierarchicalBrowseConverter.java | 42 ------------------- .../repository/BrowseIndexRestRepository.java | 6 ++- 5 files changed, 32 insertions(+), 52 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/HierarchicalBrowseConverter.java diff --git a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java index 8d065c21ce..6c38c8dd66 100644 --- a/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java +++ b/dspace-api/src/main/java/org/dspace/browse/BrowseIndex.java @@ -22,11 +22,13 @@ import org.dspace.sort.SortOption; * This class holds all the information about a specifically configured * BrowseIndex. It is responsible for parsing the configuration, understanding * about what sort options are available, and what the names of the database - * tables that hold all the information are actually called. + * tables that hold all the information are actually called. 
Hierarchical browse + * indexes also contain information about the vocabulary they're using, see: + * {@link org.dspace.content.authority.DSpaceControlledVocabularyIndex} * * @author Richard Jones */ -public final class BrowseIndex { +public class BrowseIndex { /** the configuration number, as specified in the config */ /** * used for single metadata browse tables for generating the table name @@ -102,7 +104,7 @@ public final class BrowseIndex { * * @param baseName The base of the table name */ - private BrowseIndex(String baseName) { + protected BrowseIndex(String baseName) { try { number = -1; tableBaseName = baseName; diff --git a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java index 6f350fc71e..bf8194dbd5 100644 --- a/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java +++ b/dspace-api/src/main/java/org/dspace/content/authority/DSpaceControlledVocabularyIndex.java @@ -9,6 +9,7 @@ package org.dspace.content.authority; import java.util.Set; +import org.dspace.browse.BrowseIndex; import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; /** @@ -18,7 +19,7 @@ import org.dspace.discovery.configuration.DiscoverySearchFilterFacet; * * @author Marie Verdonck (Atmire) on 04/05/2023 */ -public class DSpaceControlledVocabularyIndex { +public class DSpaceControlledVocabularyIndex extends BrowseIndex { protected DSpaceControlledVocabulary vocabulary; protected Set metadataFields; @@ -26,6 +27,7 @@ public class DSpaceControlledVocabularyIndex { public DSpaceControlledVocabularyIndex(DSpaceControlledVocabulary controlledVocabulary, Set metadataFields, DiscoverySearchFilterFacet facetConfig) { + super(controlledVocabulary.vocabularyName); this.vocabulary = controlledVocabulary; this.metadataFields = metadataFields; this.facetConfig = facetConfig; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java index 1e2899b396..2595968d4d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/BrowseIndexConverter.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.converter; import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import java.util.ArrayList; @@ -16,6 +17,7 @@ import java.util.List; import org.dspace.app.rest.model.BrowseIndexRest; import org.dspace.app.rest.projection.Projection; import org.dspace.browse.BrowseIndex; +import org.dspace.content.authority.DSpaceControlledVocabularyIndex; import org.dspace.sort.SortException; import org.dspace.sort.SortOption; import org.springframework.stereotype.Component; @@ -33,19 +35,29 @@ public class BrowseIndexConverter implements DSpaceConverter metadataList = new ArrayList(); - if (obj.isMetadataIndex()) { + String id = obj.getName(); + if (obj instanceof DSpaceControlledVocabularyIndex) { + DSpaceControlledVocabularyIndex vocObj = (DSpaceControlledVocabularyIndex) obj; + metadataList = new ArrayList<>(vocObj.getMetadataFields()); + id = vocObj.getVocabulary().getPluginInstanceName(); + 
bir.setFacetType(vocObj.getFacetConfig().getIndexFieldName()); + bir.setVocabulary(vocObj.getVocabulary().getPluginInstanceName()); + bir.setBrowseType(BROWSE_TYPE_HIERARCHICAL); + } else if (obj.isMetadataIndex()) { for (String s : obj.getMetadata().split(",")) { metadataList.add(s.trim()); } + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); bir.setBrowseType(BROWSE_TYPE_VALUE_LIST); } else { metadataList.add(obj.getSortOption().getMetadata()); + bir.setDataType(obj.getDataType()); + bir.setOrder(obj.getDefaultOrder()); bir.setBrowseType(BROWSE_TYPE_FLAT); } + bir.setId(id); bir.setMetadataList(metadataList); List sortOptionsList = new ArrayList(); @@ -56,7 +68,9 @@ public class BrowseIndexConverter implements DSpaceConverter { - - @Override - public BrowseIndexRest convert(DSpaceControlledVocabularyIndex obj, Projection projection) { - BrowseIndexRest bir = new BrowseIndexRest(); - bir.setProjection(projection); - bir.setId(obj.getVocabulary().getPluginInstanceName()); - bir.setBrowseType(BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL); - bir.setFacetType(obj.getFacetConfig().getIndexFieldName()); - bir.setVocabulary(obj.getVocabulary().getPluginInstanceName()); - bir.setMetadataList(new ArrayList<>(obj.getMetadataFields())); - return bir; - } - - @Override - public Class getModelClass() { - return DSpaceControlledVocabularyIndex.class; - } -} diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java index c87cbc6c03..b166bffda7 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BrowseIndexRestRepository.java @@ -7,6 +7,7 @@ */ package org.dspace.app.rest.repository; +import java.util.ArrayList; import java.util.Arrays; import java.util.List; @@ -68,7 +69,10 @@ public class BrowseIndexRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { try { - List indexes = Arrays.asList(BrowseIndex.getBrowseIndices()); + List indexes = new ArrayList<>(Arrays.asList(BrowseIndex.getBrowseIndices())); + choiceAuthorityService.getChoiceAuthoritiesNames() + .stream().filter(name -> choiceAuthorityService.getVocabularyIndex(name) != null) + .forEach(name -> indexes.add(choiceAuthorityService.getVocabularyIndex(name))); return converter.toRestPage(indexes, pageable, indexes.size(), utils.obtainProjection()); } catch (BrowseException e) { throw new RuntimeException(e.getMessage(), e); From 999fb46e8dfafe82313d6b19441ec34075a2a4c8 Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 5 May 2023 15:10:12 +0200 Subject: [PATCH 046/125] 94299: Add IT --- .../operation/BitstreamRemoveOperation.java | 2 +- .../app/rest/BitstreamRestRepositoryIT.java | 229 ++++++++++++++++++ 2 files changed, 230 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java index 5d37e04cea..93c495a302 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -38,7 +38,7 @@ import 
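
With DSpaceControlledVocabularyIndex now extending BrowseIndex, BrowseIndexRestRepository#findAll returns vocabulary-backed browse indexes next to the flat and value-list ones, and the converter above sets their facet type, vocabulary and browse type. A rough sketch of such an entry for the stock srsc vocabulary; the concrete values, JSON field names and the string form of BROWSE_TYPE_HIERARCHICAL are assumptions rather than output taken from this patch:

    {
      "id": "srsc",
      "browseType": "hierarchical",
      "facetType": "subject",
      "vocabulary": "srsc",
      "metadata": [ "dc.subject" ]
    }
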
org.springframework.stereotype.Component; public class BitstreamRemoveOperation extends PatchOperation { @Autowired BitstreamService bitstreamService; - private static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; + public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; @Override public Bitstream perform(Context context, Bitstream resource, Operation operation) throws SQLException { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index f9c1e469fc..3b01b4eac2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -7,22 +7,29 @@ */ package org.dspace.app.rest; +import static javax.servlet.http.HttpServletResponse.SC_NOT_FOUND; +import static javax.servlet.http.HttpServletResponse.SC_OK; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadataDoesNotExist; +import static org.dspace.app.rest.repository.patch.operation.BitstreamRemoveOperation.OPERATION_PATH_BITSTREAM_REMOVE; import static org.dspace.core.Constants.WRITE; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertEquals; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.patch; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.content; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; import java.io.InputStream; +import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.UUID; +import javax.ws.rs.core.MediaType; import org.apache.commons.codec.CharEncoding; import org.apache.commons.io.IOUtils; @@ -30,6 +37,8 @@ import org.dspace.app.rest.matcher.BitstreamFormatMatcher; import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.model.patch.Operation; +import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.authorize.service.ResourcePolicyService; @@ -52,10 +61,13 @@ import org.dspace.core.Constants; import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.eperson.service.GroupService; +import org.dspace.services.factory.DSpaceServicesFactory; import org.hamcrest.Matchers; +import org.junit.Assert; import org.junit.Ignore; import org.junit.Test; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.test.web.servlet.MvcResult; public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest { @@ -2279,6 +2291,223 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest )); } + @Test + public void deleteBitstreamsInBulk() throws Exception { + 
context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Verify that only the three bitstreams were deleted and the fourth one still exists + Assert.assertTrue(bitstreamNotFound(token, bitstream1, bitstream2, bitstream3)); + Assert.assertTrue(bitstreamExists(token, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidUUID() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + 
.withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // For the third bitstream, use an invalid UUID + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + UUID randomUUID = UUID.randomUUID(); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + randomUUID); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + MvcResult result = getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnprocessableEntity()) + .andReturn(); + + // Verify our custom error message is returned when an invalid UUID is used + assertEquals("Bitstream with uuid " + randomUUID + " could not be found in the repository", + result.getResponse().getErrorMessage()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + @Test + public void deleteBitstreamsInBulk_invalidRequestSize() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + // But set the patch.operations.limit property to 2, so that the request is invalid + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = 
new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("patch.operations.limit", 2); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isBadRequest()); + + // Verify that no bitstreams were deleted since the request was invalid + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + } + + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_OK) { + return false; + } + } + return true; + } + + public boolean bitstreamNotFound(String token, Bitstream ...bitstreams) throws Exception { + for (Bitstream bitstream : bitstreams) { + if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) + .andReturn().getResponse().getStatus() != SC_NOT_FOUND) { + return false; + } + } + return true; + } } From acb700c88774b1aea471b4bf08037a8dcfaa8be5 Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 15:55:49 +0200 Subject: [PATCH 047/125] 101549: Fix BrowseIndexMatcher and BrowsesResourceControllerIT --- .../app/rest/BrowsesResourceControllerIT.java | 26 +++++++++++--- .../app/rest/matcher/BrowseIndexMatcher.java | 34 ++++++++++++++++--- 2 files changed, 50 insertions(+), 10 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index baf459408d..a5f4af102c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -63,22 +63,23 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe //We expect the content type to be "application/hal+json;charset=UTF-8" .andExpect(content().contentType(contentType)) - //Our default Discovery config has 4 browse indexes so we expect this to be reflected in the page + //Our default Discovery config has 5 browse indexes, so we expect this to be reflected in the page // object .andExpect(jsonPath("$.page.size", is(20))) - .andExpect(jsonPath("$.page.totalElements", is(4))) + .andExpect(jsonPath("$.page.totalElements", is(5))) .andExpect(jsonPath("$.page.totalPages", is(1))) .andExpect(jsonPath("$.page.number", is(0))) - //The array of browse index should have a size 4 - .andExpect(jsonPath("$._embedded.browses", hasSize(4))) + //The array of browse index should have a size 5 + .andExpect(jsonPath("$._embedded.browses", hasSize(5))) //Check that all (and only) the default browse indexes are present .andExpect(jsonPath("$._embedded.browses", containsInAnyOrder( BrowseIndexMatcher.dateIssuedBrowseIndex("asc"), BrowseIndexMatcher.contributorBrowseIndex("asc"), BrowseIndexMatcher.titleBrowseIndex("asc"), - BrowseIndexMatcher.subjectBrowseIndex("asc") + 
BrowseIndexMatcher.subjectBrowseIndex("asc"), + BrowseIndexMatcher.hierarchicalBrowseIndex("srsc") ))) ; } @@ -125,6 +126,21 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe ; } + @Test + public void findBrowseByVocabulary() throws Exception { + //Use srsc as this vocabulary is included by default + //When we call the root endpoint + getClient().perform(get("/api/discover/browses/srsc")) + //The status has to be 200 OK + .andExpect(status().isOk()) + //We expect the content type to be "application/hal+json;charset=UTF-8" + .andExpect(content().contentType(contentType)) + + //Check that the JSON root matches the expected browse index + .andExpect(jsonPath("$", BrowseIndexMatcher.hierarchicalBrowseIndex("srsc"))) + ; + } + @Test public void findBrowseBySubject() throws Exception { //When we call the root endpoint diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java index 82d611facf..80f27b6bbb 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/matcher/BrowseIndexMatcher.java @@ -8,6 +8,9 @@ package org.dspace.app.rest.matcher; import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_FLAT; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_HIERARCHICAL; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.dspace.app.rest.test.AbstractControllerIntegrationTest.REST_SERVER_URL; import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; @@ -16,7 +19,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.text.IsEqualIgnoringCase.equalToIgnoringCase; import org.hamcrest.Matcher; -import org.hamcrest.Matchers; /** * Utility class to construct a Matcher for a browse index @@ -31,7 +33,8 @@ public class BrowseIndexMatcher { public static Matcher subjectBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.subject.*")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -44,7 +47,8 @@ public class BrowseIndexMatcher { public static Matcher titleBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.title")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("title")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -56,7 +60,8 @@ public class BrowseIndexMatcher { public static Matcher contributorBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.contributor.*", "dc.creator")), - hasJsonPath("$.metadataBrowse", Matchers.is(true)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_VALUE_LIST)), + 
hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("text")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -69,7 +74,8 @@ public class BrowseIndexMatcher { public static Matcher dateIssuedBrowseIndex(final String order) { return allOf( hasJsonPath("$.metadata", contains("dc.date.issued")), - hasJsonPath("$.metadataBrowse", Matchers.is(false)), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_FLAT)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), hasJsonPath("$.dataType", equalToIgnoringCase("date")), hasJsonPath("$.order", equalToIgnoringCase(order)), hasJsonPath("$.sortOptions[*].name", containsInAnyOrder("title", "dateissued", "dateaccessioned")), @@ -77,4 +83,22 @@ public class BrowseIndexMatcher { hasJsonPath("$._links.items.href", is(REST_SERVER_URL + "discover/browses/dateissued/items")) ); } + + public static Matcher hierarchicalBrowseIndex(final String vocabulary) { + return allOf( + hasJsonPath("$.metadata", contains("dc.subject")), + hasJsonPath("$.browseType", equalToIgnoringCase(BROWSE_TYPE_HIERARCHICAL)), + hasJsonPath("$.type", equalToIgnoringCase("browse")), + hasJsonPath("$.facetType", equalToIgnoringCase("subject")), + hasJsonPath("$.vocabulary", equalToIgnoringCase(vocabulary)), + hasJsonPath("$._links.vocabulary.href", + is(REST_SERVER_URL + String.format("submission/vocabularies/%s/", vocabulary))), + hasJsonPath("$._links.items.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/items", vocabulary))), + hasJsonPath("$._links.entries.href", + is(REST_SERVER_URL + String.format("discover/browses/%s/entries", vocabulary))), + hasJsonPath("$._links.self.href", + is(REST_SERVER_URL + String.format("discover/browses/%s", vocabulary))) + ); + } } From ab240d7f0ec07d9454ae925b2a03154c5cb2b80a Mon Sep 17 00:00:00 2001 From: Nona Luypaert Date: Fri, 5 May 2023 17:47:24 +0200 Subject: [PATCH 048/125] 101549: Fix BrowsesResourceControllerIT --- .../org/dspace/app/rest/BrowsesResourceControllerIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java index 301ffeab41..d1791ab872 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BrowsesResourceControllerIT.java @@ -8,6 +8,7 @@ package org.dspace.app.rest; import static org.dspace.app.rest.matcher.MetadataMatcher.matchMetadata; +import static org.dspace.app.rest.model.BrowseIndexRest.BROWSE_TYPE_VALUE_LIST; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -2158,7 +2159,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))) + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))) ; } @@ -2175,7 +2176,7 @@ public class BrowsesResourceControllerIT extends AbstractControllerIntegrationTe // The browse definition ID should be "author" .andExpect(jsonPath("$.id", is("author"))) // It should be configured as a 
metadata browse - .andExpect(jsonPath("$.metadataBrowse", is(true))); + .andExpect(jsonPath("$.browseType", is(BROWSE_TYPE_VALUE_LIST))); } @Test From 5088447111dd10f8627fe61d5602e6e331a93ff1 Mon Sep 17 00:00:00 2001 From: Michael W Spalti Date: Sat, 6 May 2023 11:30:51 -0700 Subject: [PATCH 049/125] Updated solr query params. --- .../dspace/app/iiif/service/WordHighlightSolrSearch.java | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java index da50f33582..9e6022548d 100644 --- a/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java +++ b/dspace-iiif/src/main/java/org/dspace/app/iiif/service/WordHighlightSolrSearch.java @@ -118,7 +118,8 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { } /** - * Constructs a solr search URL. + * Constructs a solr search URL. Compatible with solr-ocrhighlighting-0.7.2. + * https://github.com/dbmdz/solr-ocrhighlighting/releases/tag/0.7.2 * * @param query the search terms * @param manifestId the id of the manifest in which to search @@ -132,8 +133,9 @@ public class WordHighlightSolrSearch implements SearchAnnotationService { solrQuery.set("hl.ocr.fl", "ocr_text"); solrQuery.set("hl.ocr.contextBlock", "line"); solrQuery.set("hl.ocr.contextSize", "2"); - solrQuery.set("hl.snippets", "10"); - solrQuery.set("hl.ocr.trackPages", "off"); + solrQuery.set("hl.snippets", "8192"); + solrQuery.set("hl.ocr.maxPassages", "8192"); + solrQuery.set("hl.ocr.trackPages", "on"); solrQuery.set("hl.ocr.limitBlock","page"); solrQuery.set("hl.ocr.absoluteHighlights", "true"); From 7971887b9a8603aa0039f8e5f3595520c0e65c3a Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Fri, 21 Apr 2023 06:54:13 +0200 Subject: [PATCH 050/125] DURACOM-136 allow script execution by user other than admins --- .../ProcessCleanerConfiguration.java | 17 -- .../MetadataDeletionScriptConfiguration.java | 17 -- .../MetadataExportScriptConfiguration.java | 17 -- ...tadataExportSearchScriptConfiguration.java | 6 - .../MetadataImportScriptConfiguration.java | 16 -- .../harvest/HarvestScriptConfiguration.java | 14 - .../ItemExportScriptConfiguration.java | 17 -- .../ItemImportScriptConfiguration.java | 16 -- .../MediaFilterScriptConfiguration.java | 19 -- ...rDatabaseResyncCliScriptConfiguration.java | 6 - .../authorize/AuthorizeServiceImpl.java | 14 + .../authorize/service/AuthorizeService.java | 9 + .../org/dspace/content/dao/ProcessDAO.java | 23 ++ .../content/dao/impl/ProcessDAOImpl.java | 28 ++ .../curate/CurationScriptConfiguration.java | 43 ++- .../IndexDiscoveryScriptConfiguration.java | 17 -- .../OrcidBulkPushScriptConfiguration.java | 17 -- .../dspace/scripts/ProcessServiceImpl.java | 10 + .../org/dspace/scripts/ScriptServiceImpl.java | 2 +- .../configuration/ScriptConfiguration.java | 22 +- .../scripts/service/ProcessService.java | 22 ++ ...iledOpenUrlTrackerScriptConfiguration.java | 17 -- ...nFormsMigrationCliScriptConfiguration.java | 17 -- ...sionFormsMigrationScriptConfiguration.java | 36 ++- ...riptionEmailNotificationConfiguration.java | 16 -- .../builder/AbstractDSpaceObjectBuilder.java | 4 +- .../java/org/dspace/builder/ItemBuilder.java | 4 +- .../org/dspace/builder/ProcessBuilder.java | 3 + ...MockDSpaceRunnableScriptConfiguration.java | 17 -- .../app/rest/ScriptProcessesController.java | 15 +- .../repository/ProcessRestRepository.java | 16 ++ 
.../rest/repository/ScriptRestRepository.java | 30 +- .../app/rest/ProcessRestRepositoryIT.java | 26 ++ .../app/rest/ScriptRestRepositoryIT.java | 136 ++++++++- ...TypeConversionTestScriptConfiguration.java | 5 - .../org/dspace/curate/CurationScriptIT.java | 267 ++++++++++++++++++ ...MockDSpaceRunnableScriptConfiguration.java | 17 -- 37 files changed, 659 insertions(+), 319 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java index 8d189038d9..91dcfb5dfe 100644 --- a/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/administer/ProcessCleanerConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.administer; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ProcessCleaner} script. */ public class ProcessCleanerConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java index 9ccd53944a..fb228e7041 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataDeletionScriptConfiguration.java @@ -7,33 +7,16 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataDeletion} script. 
*/ public class MetadataDeletionScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java index 31556afc8d..aa76c09c0a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportScriptConfiguration.java @@ -7,22 +7,14 @@ */ package org.dspace.app.bulkedit; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link MetadataExport} script */ public class MetadataExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MetadataExportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java index 4e350562bc..4f2a225d3a 100644 --- a/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/bulkedit/MetadataExportSearchScriptConfiguration.java @@ -9,7 +9,6 @@ package org.dspace.app.bulkedit; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -29,11 +28,6 @@ public class MetadataExportSearchScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -40,15 +33,6 @@ public class MetadataImportScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java index 982973e47c..ff83c3ecb2 100644 --- a/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java +++ 
b/dspace-api/src/main/java/org/dspace/app/harvest/HarvestScriptConfiguration.java @@ -7,18 +7,11 @@ */ package org.dspace.app.harvest; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class HarvestScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; private Class dspaceRunnableClass; @@ -32,13 +25,6 @@ public class HarvestScriptConfiguration extends ScriptConfigu this.dspaceRunnableClass = dspaceRunnableClass; } - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java index cf70120d27..b37df5f5ea 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemexport/ItemExportScriptConfiguration.java @@ -7,14 +7,9 @@ */ package org.dspace.app.itemexport; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemExport} script @@ -23,9 +18,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class ItemExportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class ItemExportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java index a3149040c4..fd895e2f44 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportScriptConfiguration.java @@ -8,14 +8,10 @@ package org.dspace.app.itemimport; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link ItemImport} script @@ -24,9 +20,6 @@ import 
org.springframework.beans.factory.annotation.Autowired; */ public class ItemImportScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +32,6 @@ public class ItemImportScriptConfiguration extends ScriptC this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java index 26347c56ee..867e684db8 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/MediaFilterScriptConfiguration.java @@ -7,25 +7,16 @@ */ package org.dspace.app.mediafilter; -import java.sql.SQLException; - import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; public class MediaFilterScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; private static final String MEDIA_FILTER_PLUGINS_KEY = "filter.plugins"; - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; @@ -36,16 +27,6 @@ public class MediaFilterScriptConfiguration extends this.dspaceRunnableClass = dspaceRunnableClass; } - - @Override - public boolean isAllowedToExecute(final Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { Options options = new Options(); diff --git a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java index b238ccf061..067c76cce8 100644 --- a/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/app/solrdatabaseresync/SolrDatabaseResyncCliScriptConfiguration.java @@ -8,7 +8,6 @@ package org.dspace.app.solrdatabaseresync; import org.apache.commons.cli.Options; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -27,11 +26,6 @@ public class SolrDatabaseResyncCliScriptConfiguration extends ScriptConfiguratio this.dspaceRunnableClass = dspaceRunnableClass; } - @Override - public boolean isAllowedToExecute(Context context) { - return true; - } - @Override public Options getOptions() { if (options == null) { diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 34543c078a..bfd933f482 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ 
b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java
@@ -43,6 +43,7 @@ import org.dspace.discovery.SearchService;
 import org.dspace.discovery.SearchServiceException;
 import org.dspace.discovery.indexobject.IndexableCollection;
 import org.dspace.discovery.indexobject.IndexableCommunity;
+import org.dspace.discovery.indexobject.IndexableItem;
 import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;
 import org.dspace.eperson.service.GroupService;
@@ -809,6 +810,19 @@ public class AuthorizeServiceImpl implements AuthorizeService {
         return performCheck(context, "search.resourcetype:" + IndexableCollection.TYPE);
     }
 
+    /**
+     * Checks that the context's current user is an item admin in the site by querying the solr database.
+     *
+     * @param context context with the current user
+     * @return true if the current user is an item admin in the site,
+     *         false when this is not the case, or an exception occurred
+     * @throws java.sql.SQLException passed through.
+     */
+    @Override
+    public boolean isItemAdmin(Context context) throws SQLException {
+        return performCheck(context, "search.resourcetype:" + IndexableItem.TYPE);
+    }
+
     /**
      * Checks that the context's current user is a community or collection admin in the site.
      *
diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java
index 36679f94c6..86ff236168 100644
--- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java
+++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java
@@ -532,6 +532,15 @@ public interface AuthorizeService {
      */
     boolean isCollectionAdmin(Context context) throws SQLException;
 
+    /**
+     * Checks that the context's current user is an item admin in the site by querying the solr database.
+     *
+     * @param context context with the current user
+     * @return true if the current user is an item admin in the site,
+     *         false when this is not the case, or an exception occurred
+     */
+    boolean isItemAdmin(Context context) throws SQLException;
+
     /**
      * Checks that the context's current user is a community or collection admin in the site.
      *
diff --git a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
index 69bac319c6..95ec40c7a5 100644
--- a/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
+++ b/dspace-api/src/main/java/org/dspace/content/dao/ProcessDAO.java
@@ -14,6 +14,7 @@ import java.util.List;
 import org.dspace.content.ProcessStatus;
 import org.dspace.core.Context;
 import org.dspace.core.GenericDAO;
+import org.dspace.eperson.EPerson;
 import org.dspace.scripts.Process;
 import org.dspace.scripts.ProcessQueryParameterContainer;
 
@@ -97,4 +98,26 @@ public interface ProcessDAO extends GenericDAO {
     List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
         throws SQLException;
 
+    /**
+     * Returns a list of all Process objects in the database created by the given user.
+     *
+     * @param context The relevant DSpace context
+     * @param user The user to search for
+     * @param limit The limit for the amount of Processes returned
+     * @param offset The offset for the Processes to be returned
+     * @return The list of all Process objects in the Database
+     * @throws SQLException If something goes wrong
+     */
+    List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;
+
+    /**
+     * Count all the processes which are related to the given user.
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; + } diff --git a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java index 23ce6ce381..d719b5006c 100644 --- a/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/dao/impl/ProcessDAOImpl.java @@ -24,6 +24,7 @@ import org.dspace.content.ProcessStatus; import org.dspace.content.dao.ProcessDAO; import org.dspace.core.AbstractHibernateDAO; import org.dspace.core.Context; +import org.dspace.eperson.EPerson; import org.dspace.scripts.Process; import org.dspace.scripts.ProcessQueryParameterContainer; import org.dspace.scripts.Process_; @@ -168,6 +169,33 @@ public class ProcessDAOImpl extends AbstractHibernateDAO implements Pro return list(context, criteriaQuery, false, Process.class, -1, -1); } + @Override + public List findByUser(Context context, EPerson user, int limit, int offset) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + + List orderList = new LinkedList<>(); + orderList.add(criteriaBuilder.desc(processRoot.get(Process_.PROCESS_ID))); + criteriaQuery.orderBy(orderList); + + return list(context, criteriaQuery, false, Process.class, limit, offset); + } + + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + CriteriaBuilder criteriaBuilder = getCriteriaBuilder(context); + CriteriaQuery criteriaQuery = getCriteriaQuery(criteriaBuilder, Process.class); + + Root processRoot = criteriaQuery.from(Process.class); + criteriaQuery.select(processRoot); + criteriaQuery.where(criteriaBuilder.equal(processRoot.get(Process_.E_PERSON), user)); + return count(context, criteriaQuery, criteriaBuilder, processRoot); + } + } diff --git a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java index fefb4eb768..2587e6b025 100644 --- a/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/curate/CurationScriptConfiguration.java @@ -8,12 +8,15 @@ package org.dspace.curate; import java.sql.SQLException; +import java.util.List; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.DSpaceObject; import org.dspace.core.Context; +import org.dspace.handle.factory.HandleServiceFactory; +import org.dspace.handle.service.HandleService; +import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link Curation} script @@ -22,9 +25,6 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class CurationScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; 
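     // The inherited, autowired authorizeService from ScriptConfiguration replaces the private field
     // removed above; the parameter-aware isAllowedToExecute override below uses it to let repository,
     // community/collection and item admins start a curation, and to restrict a run against a specific
     // target (the -i <handle> parameter) to admins of that object.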
     @Override
@@ -38,16 +38,37 @@ public class CurationScriptConfiguration extends ScriptConfi
     }
 
     /**
-     * Only admin can run Curation script via the scripts and processes endpoints.
-     * @param context The relevant DSpace context
-     * @return True if currentUser is admin, otherwise false
+     * Only repository admins or admins of the target object can run the Curation script via the scripts
+     * and processes endpoints.
+     *
+     * @param context               The relevant DSpace context
+     * @param commandLineParameters the parameters that will be used to start the process if known,
+     *                              null otherwise
+     * @return true if the currentUser is allowed to run the script with the specified parameters,
+     *         or at least in some cases when the parameters are not yet known
      */
     @Override
-    public boolean isAllowedToExecute(Context context) {
+    public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
         try {
-            return authorizeService.isAdmin(context);
+            if (commandLineParameters == null) {
+                return authorizeService.isAdmin(context) || authorizeService.isComColAdmin(context)
+                    || authorizeService.isItemAdmin(context);
+            } else if (commandLineParameters.stream()
+                                            .map(DSpaceCommandLineParameter::getName)
+                                            .noneMatch("-i"::equals)) {
+                return authorizeService.isAdmin(context);
+            } else {
+                String dspaceObjectID = commandLineParameters.stream()
+                                                             .filter(parameter -> "-i".equals(parameter.getName()))
+                                                             .map(DSpaceCommandLineParameter::getValue)
+                                                             .findFirst()
+                                                             .get();
+                HandleService handleService = HandleServiceFactory.getInstance().getHandleService();
+                DSpaceObject dso = handleService.resolveToObject(context, dspaceObjectID);
+                return authorizeService.isAdmin(context, dso);
+            }
         } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+            throw new RuntimeException(e);
         }
     }
 
diff --git a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java
index 8bf3cf2aba..8707b733a6 100644
--- a/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/discovery/IndexDiscoveryScriptConfiguration.java
@@ -7,22 +7,14 @@
  */
 package org.dspace.discovery;
 
-import java.sql.SQLException;
-
 import org.apache.commons.cli.Options;
-import org.dspace.authorize.service.AuthorizeService;
-import org.dspace.core.Context;
 import org.dspace.scripts.configuration.ScriptConfiguration;
-import org.springframework.beans.factory.annotation.Autowired;
 
 /**
  * The {@link ScriptConfiguration} for the {@link IndexClient} script
  */
 public class IndexDiscoveryScriptConfiguration<T extends IndexClient> extends ScriptConfiguration<T> {
 
-    @Autowired
-    private AuthorizeService authorizeService;
-
     private Class<T> dspaceRunnableClass;
 
     @Override
@@ -30,15 +22,6 @@ public class IndexDiscoveryScriptConfiguration extends Sc
         return dspaceRunnableClass;
     }
 
-    @Override
-    public boolean isAllowedToExecute(Context context) {
-        try {
-            return authorizeService.isAdmin(context);
-        } catch (SQLException e) {
-            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
-        }
-    }
-
     @Override
     public Options getOptions() {
         if (options == null) {
diff --git a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java
index 1a657343c0..88a1033eca 100644
---
a/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/orcid/script/OrcidBulkPushScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.orcid.script; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Script configuration for {@link OrcidBulkPush}. @@ -24,20 +19,8 @@ import org.springframework.beans.factory.annotation.Autowired; */ public class OrcidBulkPushScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Class getDspaceRunnableClass() { return dspaceRunnableClass; diff --git a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java index 33fea75add..2e14aeaa36 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ProcessServiceImpl.java @@ -129,6 +129,11 @@ public class ProcessServiceImpl implements ProcessService { return processes; } + @Override + public List findByUser(Context context, EPerson eperson, int limit, int offset) throws SQLException { + return processDAO.findByUser(context, eperson, limit, offset); + } + @Override public void start(Context context, Process process) throws SQLException { process.setProcessStatus(ProcessStatus.RUNNING); @@ -311,6 +316,11 @@ public class ProcessServiceImpl implements ProcessService { return this.processDAO.findByStatusAndCreationTimeOlderThan(context, statuses, date); } + @Override + public int countByUser(Context context, EPerson user) throws SQLException { + return processDAO.countByUser(context, user); + } + private String formatLogLine(int processId, String scriptName, String output, ProcessLogLevel processLogLevel) { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS"); StringBuilder sb = new StringBuilder(); diff --git a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java index c8a7812a51..abb700cb10 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/scripts/ScriptServiceImpl.java @@ -37,7 +37,7 @@ public class ScriptServiceImpl implements ScriptService { @Override public List getScriptConfigurations(Context context) { return serviceManager.getServicesByType(ScriptConfiguration.class).stream().filter( - scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context)) + scriptConfiguration -> scriptConfiguration.isAllowedToExecute(context, null)) .sorted(Comparator.comparing(ScriptConfiguration::getName)) .collect(Collectors.toList()); } diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index 4b15c22f44..e22063eb49 100644 --- 
a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java
+++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java
@@ -7,17 +7,28 @@
  */
 package org.dspace.scripts.configuration;
 
+import java.sql.SQLException;
+import java.util.List;
+
 import org.apache.commons.cli.Options;
+import org.dspace.authorize.service.AuthorizeService;
 import org.dspace.core.Context;
+import org.dspace.scripts.DSpaceCommandLineParameter;
 import org.dspace.scripts.DSpaceRunnable;
 import org.springframework.beans.factory.BeanNameAware;
+import org.springframework.beans.factory.annotation.Autowired;
 
 /**
  * This class represents an Abstract class that a ScriptConfiguration can inherit to further implement this
- * and represent a script's configuration
+ * and represent a script's configuration.
+ * By default, scripts are available only to repository administrators; scripts that have a broader
+ * audience must override the {@link #isAllowedToExecute(Context, List)} method.
  */
 public abstract class ScriptConfiguration<T extends DSpaceRunnable> implements BeanNameAware {
 
+    @Autowired
+    protected AuthorizeService authorizeService;
+
     /**
      * The possible options for this script
     */
@@ -70,6 +81,7 @@ public abstract class ScriptConfiguration implements B
     * @param dspaceRunnableClass The dspaceRunnableClass to be set on this IndexDiscoveryScriptConfiguration
     */
    public abstract void setDspaceRunnableClass(Class<T> dspaceRunnableClass);
+
    /**
     * This method will return if the script is allowed to execute in the given context. This is by default set
     * to the currentUser in the context being an admin, however this can be overwritten by each script individually
     * @param context The relevant DSpace context
     * @return A boolean indicating whether the script is allowed to execute or not
     */
-    public abstract boolean isAllowedToExecute(Context context);
+    public boolean isAllowedToExecute(Context context, List<DSpaceCommandLineParameter> commandLineParameters) {
+        try {
+            return authorizeService.isAdmin(context);
+        } catch (SQLException e) {
+            throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e);
+        }
+    }
 
     /**
      * The getter for the options of the Script
diff --git a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
index ce6a173b0e..c6fc248881 100644
--- a/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
+++ b/dspace-api/src/main/java/org/dspace/scripts/service/ProcessService.java
@@ -255,4 +255,26 @@ public interface ProcessService {
      */
     List<Process> findByStatusAndCreationTimeOlderThan(Context context, List<ProcessStatus> statuses, Date date)
         throws SQLException;
+
+    /**
+     * Returns a list of all Process objects in the database created by the given user.
+     *
+     * @param context The relevant DSpace context
+     * @param user The user to search for
+     * @param limit The limit for the amount of Processes returned
+     * @param offset The offset for the Processes to be returned
+     * @return The list of all Process objects in the Database
+     * @throws SQLException If something goes wrong
+     */
+    List<Process> findByUser(Context context, EPerson user, int limit, int offset) throws SQLException;
+
+    /**
+     * Count all the processes which are related to the given user.
+ * + * @param context The relevant DSpace context + * @param user The user to search for + * @return The number of results matching the query + * @throws SQLException If something goes wrong + */ + int countByUser(Context context, EPerson user) throws SQLException; } diff --git a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java index dcae4aa4cb..7d1015c8e2 100644 --- a/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/statistics/export/RetryFailedOpenUrlTrackerScriptConfiguration.java @@ -7,13 +7,8 @@ */ package org.dspace.statistics.export; -import java.sql.SQLException; - import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * The {@link ScriptConfiguration} for the {@link RetryFailedOpenUrlTracker} script @@ -21,9 +16,6 @@ import org.springframework.beans.factory.annotation.Autowired; public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -41,15 +33,6 @@ public class RetryFailedOpenUrlTrackerScriptConfiguration extends ScriptConfiguration { - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -38,15 +30,6 @@ public class SubmissionFormsMigrationCliScriptConfiguration + extends ScriptConfiguration { + + private Class dspaceRunnableClass; @Override - public boolean isAllowedToExecute(Context context) { + public Class getDspaceRunnableClass() { + return this.dspaceRunnableClass; + } + + @Override + public void setDspaceRunnableClass(Class dspaceRunnableClass) { + this.dspaceRunnableClass = dspaceRunnableClass; + } + + @Override + public Options getOptions() { + if (options == null) { + Options options = new Options(); + + options.addOption("f", "input-forms", true, "Path to source input-forms.xml file location"); + options.addOption("s", "item-submission", true, "Path to source item-submission.xml file location"); + options.addOption("h", "help", false, "help"); + + super.options = options; + } + return options; + } + + @Override + public boolean isAllowedToExecute(Context context, List commandLineParameters) { // Script is not allowed to be executed from REST side return false; } diff --git a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java index 52685b563d..dd61fab967 100644 --- a/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/subscriptions/SubscriptionEmailNotificationConfiguration.java @@ -8,15 +8,11 @@ package org.dspace.subscriptions; -import java.sql.SQLException; import java.util.Objects; import org.apache.commons.cli.Options; -import org.dspace.authorize.AuthorizeServiceImpl; -import org.dspace.core.Context; import org.dspace.scripts.DSpaceRunnable; import org.dspace.scripts.configuration.ScriptConfiguration; -import org.springframework.beans.factory.annotation.Autowired; /** * Implementation of 
{@link DSpaceRunnable} to find subscribed objects and send notification mails about them @@ -26,18 +22,6 @@ public class SubscriptionEmailNotificationConfiguration dspaceRunnableClass; - @Autowired - private AuthorizeServiceImpl authorizeService; - - @Override - public boolean isAllowedToExecute(Context context) { - try { - return authorizeService.isAdmin(context); - } catch (SQLException e) { - throw new RuntimeException("SQLException occurred when checking if the current user is an admin", e); - } - } - @Override public Options getOptions() { if (Objects.isNull(options)) { diff --git a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java index ff1083d318..b20515017a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/AbstractDSpaceObjectBuilder.java @@ -162,8 +162,8 @@ public abstract class AbstractDSpaceObjectBuilder return (B) this; } /** - * Support method to grant the {@link Constants#READ} permission over an object only to a specific group. Any other - * READ permissions will be removed + * Support method to grant the {@link Constants#ADMIN} permission over an object only to a specific eperson. + * If another ADMIN policy is in place for an eperson it will be replaced * * @param dso * the DSpaceObject on which grant the permission diff --git a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java index 70dea309f2..3e5ab0f38f 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ItemBuilder.java @@ -353,9 +353,9 @@ public class ItemBuilder extends AbstractDSpaceObjectBuilder { } /** - * Create an admin group for the collection with the specified members + * Assign the admin permission to the specified eperson * - * @param ePerson epersons to add to the admin group + * @param ePerson the eperson that will get the ADMIN permission on the item * @return this builder * @throws SQLException * @throws AuthorizeException diff --git a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java index 86573940e4..0631e1b55a 100644 --- a/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/ProcessBuilder.java @@ -113,6 +113,9 @@ public class ProcessBuilder extends AbstractBuilder { } public static void deleteProcess(Integer integer) throws SQLException, IOException { + if (integer == null) { + return; + } try (Context c = new Context()) { c.turnOffAuthorisationSystem(); Process process = processService.find(c, integer); diff --git a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ b/dspace-api/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import 
org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration> startProcess( @PathVariable(name = "name") String scriptName, @RequestParam(name = "file", required = false) List files) @@ -75,4 +77,13 @@ public class ScriptProcessesController { return ControllerUtils.toResponseEntity(HttpStatus.ACCEPTED, new HttpHeaders(), processResource); } + @RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE) + @PreAuthorize("hasAuthority('AUTHENTICATED')") + public ResponseEntity> startProcessInvalidMimeType( + @PathVariable(name = "name") String scriptName, + @RequestParam(name = "file", required = false) List files) + throws Exception { + throw new DSpaceBadRequestException("Invalid mimetype"); + } + } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java index 33addf7049..2479eeda97 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessRestRepository.java @@ -94,6 +94,22 @@ public class ProcessRestRepository extends DSpaceRestRepository findByCurrentUser(Pageable pageable) { + + try { + Context context = obtainContext(); + long total = processService.countByUser(context, context.getCurrentUser()); + List processes = processService.findByUser(context, context.getCurrentUser(), + pageable.getPageSize(), + Math.toIntExact(pageable.getOffset())); + return converter.toRestPage(processes, pageable, total, utils.obtainProjection()); + } catch (SQLException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + /** * Calls on the getBitstreams method to retrieve all the Bitstreams of this process * @param processId The processId of the Process to retrieve the Bitstreams for diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index d974a6d78a..2fc996a327 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -37,6 +37,7 @@ import org.dspace.scripts.service.ScriptService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.AccessDeniedException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -56,29 +57,24 @@ public class ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = scriptService.getScriptConfigurations(context); @@ -104,11 +100,17 @@ public class ScriptRestRepository extends DSpaceRestRepository dSpaceCommandLineParameters = processPropertiesToDSpaceCommandLineParameters(properties); ScriptConfiguration scriptToExecute = 
scriptService.getScriptConfiguration(scriptName); + if (scriptToExecute == null) { - throw new DSpaceBadRequestException("The script for name: " + scriptName + " wasn't found"); + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); } - if (!scriptToExecute.isAllowedToExecute(context)) { - throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName); + try { + if (!scriptToExecute.isAllowedToExecute(context, dSpaceCommandLineParameters)) { + throw new AuthorizeException("Current user is not eligible to execute script with name: " + scriptName + + " and the specified parameters " + StringUtils.join(dSpaceCommandLineParameters, ", ")); + } + } catch (IllegalArgumentException e) { + throw new DSpaceBadRequestException("missed handle"); } RestDSpaceRunnableHandler restDSpaceRunnableHandler = new RestDSpaceRunnableHandler( context.getCurrentUser(), scriptToExecute.getName(), dSpaceCommandLineParameters, diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index 5ac416e606..d76e20b23d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -7,6 +7,8 @@ */ package org.dspace.app.rest; +import static org.dspace.app.rest.matcher.ProcessMatcher.matchProcess; +import static org.dspace.content.ProcessStatus.SCHEDULED; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; @@ -783,6 +785,30 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(status().isBadRequest()); } + @Test + public void testFindByCurrentUser() throws Exception { + + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + ProcessBuilder.createProcess(context, admin, "mock-script", parameters) + .withStartAndEndTime("11/01/1990", "19/01/1990") + .build(); + Process process3 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("12/01/1990", "18/01/1990") + .build(); + + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(get("/api/system/processes/search/own")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.processes", contains( + matchProcess(process3.getName(), eperson.getID().toString(), process3.getID(), parameters, SCHEDULED), + matchProcess(process1.getName(), eperson.getID().toString(), process1.getID(), parameters, SCHEDULED)))) + .andExpect(jsonPath("$.page", is(PageMatcher.pageEntryWithTotalPagesAndElements(0, 20, 1, 2)))); + + } + @Test public void getProcessOutput() throws Exception { try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 07edfeec33..16e691ef6f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -12,6 +12,7 @@ import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; 
import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; @@ -44,6 +45,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.AuthorizeException; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.GroupBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ProcessBuilder; @@ -53,6 +55,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.ProcessStatus; import org.dspace.content.service.BitstreamService; +import org.dspace.eperson.EPerson; import org.dspace.eperson.Group; import org.dspace.scripts.DSpaceCommandLineParameter; import org.dspace.scripts.Process; @@ -123,12 +126,65 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test - public void findAllScriptsUnauthorizedTest() throws Exception { + public void findAllScriptsGenericLoggedInUserTest() throws Exception { String token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(get("/api/system/scripts")) - .andExpect(status().isForbidden()); + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test + public void findAllScriptsLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + // the local admins have at least access to the curate script + // and not access to process-cleaner script + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + getClient(colAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + 
curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(itemAdminToken).perform(get("/api/system/scripts").param("size", "100")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.scripts", Matchers.hasItem( + ScriptMatcher.matchScript(curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription())))) + .andExpect(jsonPath("$.page.totalElements", greaterThanOrEqualTo(1))); } @Test @@ -222,6 +278,63 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { )); } + @Test + public void findOneScriptByNameLocalAdminsTest() throws Exception { + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + ScriptConfiguration curateScriptConfiguration = + scriptConfigurations.stream().filter(scriptConfiguration + -> scriptConfiguration.getName().equals("curate")) + .findAny().get(); + + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + getClient(comAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(colAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + getClient(itemAdminToken).perform(get("/api/system/scripts/" + curateScriptConfiguration.getName())) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ScriptMatcher + .matchScript( + curateScriptConfiguration.getName(), + curateScriptConfiguration.getDescription()))); + } + + @Test + public void findOneScriptByNameNotAuthenticatedTest() throws Exception { + getClient().perform(get("/api/system/scripts/mock-script")) + .andExpect(status().isUnauthorized()); + } + @Test public void findOneScriptByNameTestAccessDenied() throws Exception { String token = getAuthToken(eperson.getEmail(), password); @@ -235,7 +348,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(get("/api/system/scripts/mock-script-invalid")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -277,16 +390,6 @@ public class ScriptRestRepositoryIT extends 
AbstractControllerIntegrationTest { @Test public void postProcessAdminNoOptionsFailedStatus() throws Exception { -// List list = new LinkedList<>(); -// -// ParameterValueRest parameterValueRest = new ParameterValueRest(); -// parameterValueRest.setName("-z"); -// parameterValueRest.setValue("test"); -// ParameterValueRest parameterValueRest1 = new ParameterValueRest(); -// parameterValueRest1.setName("-q"); -// list.add(parameterValueRest); -// list.add(parameterValueRest1); - LinkedList parameters = new LinkedList<>(); parameters.add(new DSpaceCommandLineParameter("-z", "test")); @@ -322,7 +425,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test @@ -434,6 +537,8 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } + + @Test public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { @@ -601,9 +706,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { } } - @After public void destroy() throws Exception { + context.turnOffAuthorisationSystem(); CollectionUtils.emptyIfNull(processService.findAll(context)).stream().forEach(process -> { try { processService.delete(context, process); @@ -611,6 +716,7 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { throw new RuntimeException(e); } }); + context.restoreAuthSystemState(); super.destroy(); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java index 27c37f1487..ccb7d43a23 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/scripts/TypeConversionTestScriptConfiguration.java @@ -11,7 +11,6 @@ import java.io.InputStream; import org.apache.commons.cli.Options; import org.dspace.app.rest.converter.ScriptConverter; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; /** @@ -28,10 +27,6 @@ public class TypeConversionTestScriptConfiguration siteParameters = new LinkedList<>(); + siteParameters.add(new DSpaceCommandLineParameter("-i", site.getHandle())); + siteParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList comParameters = new LinkedList<>(); + comParameters.add(new DSpaceCommandLineParameter("-i", community.getHandle())); + comParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherComParameters = new LinkedList<>(); + anotherComParameters.add(new DSpaceCommandLineParameter("-i", anotherCommunity.getHandle())); + anotherComParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList colParameters = new LinkedList<>(); + colParameters.add(new DSpaceCommandLineParameter("-i", collection.getHandle())); + colParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherColParameters = new LinkedList<>(); + anotherColParameters.add(new DSpaceCommandLineParameter("-i", anotherCollection.getHandle())); + anotherColParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList itemParameters = new LinkedList<>(); + itemParameters.add(new 
DSpaceCommandLineParameter("-i", item.getHandle())); + itemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + LinkedList anotherItemParameters = new LinkedList<>(); + anotherItemParameters.add(new DSpaceCommandLineParameter("-i", anotherItem.getHandle())); + anotherItemParameters.add(new DSpaceCommandLineParameter("-t", "noop")); + String comAdminToken = getAuthToken(comAdmin.getEmail(), password); + String colAdminToken = getAuthToken(colAdmin.getEmail(), password); + String itemAdminToken = getAuthToken(itemAdmin.getEmail(), password); + + List listCurateSite = siteParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCom = comParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCom = anotherComParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listCol = colParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherCol = anotherColParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listItem = itemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + List listAnotherItem = anotherItemParameters.stream() + .map(dSpaceCommandLineParameter -> dSpaceRunnableParameterConverter + .convert(dSpaceCommandLineParameter, Projection.DEFAULT)) + .collect(Collectors.toList()); + String adminToken = getAuthToken(admin.getEmail(), password); + List acceptableProcessStatuses = new LinkedList<>(); + acceptableProcessStatuses.addAll(Arrays.asList(ProcessStatus.SCHEDULED, + ProcessStatus.RUNNING, + ProcessStatus.COMPLETED)); + + AtomicReference idSiteRef = new AtomicReference<>(); + AtomicReference idComRef = new AtomicReference<>(); + AtomicReference idComColRef = new AtomicReference<>(); + AtomicReference idComItemRef = new AtomicReference<>(); + AtomicReference idColRef = new AtomicReference<>(); + AtomicReference idColItemRef = new AtomicReference<>(); + AtomicReference idItemRef = new AtomicReference<>(); + + ScriptConfiguration curateScriptConfiguration = scriptService.getScriptConfiguration("curate"); + // we should be able to start the curate script with all our admins on the respective dso + try { + // start a process as general admin + getClient(adminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(admin.getID()), + siteParameters, + acceptableProcessStatuses)))) + .andDo(result -> idSiteRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // check with the com admin + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + 
"/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + comParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be able to run the curate also over the children collection and item + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(comAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idComItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the com admin should be NOT able to run the curate over other com, col or items + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCom))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(comAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the col admin + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + colParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the col admin should be able to run the curate also over the owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(colAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idColItemRef + 
.set(read(result.getResponse().getContentAsString(), "$.processId"))); + + // the col admin should be NOT able to run the curate over the community nor another collection nor + // on a not owned item + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherCol))) + .andExpect(status().isForbidden()); + getClient(colAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + // check with the item admin + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listItem))) + .andExpect(status().isAccepted()) + .andExpect(jsonPath("$", is( + ProcessMatcher.matchProcess("curate", + String.valueOf(itemAdmin.getID()), + itemParameters, + acceptableProcessStatuses)))) + .andDo(result -> idItemRef + .set(read(result.getResponse().getContentAsString(), "$.processId"))); + // the item admin should be NOT able to run the curate over the community nor the collection nor + // on a not owned item + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCurateSite))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCom))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listCol))) + .andExpect(status().isForbidden()); + getClient(itemAdminToken) + .perform(multipart("/api/system/scripts/" + curateScriptConfiguration.getName() + "/processes") + .param("properties", new ObjectMapper().writeValueAsString(listAnotherItem))) + .andExpect(status().isForbidden()); + + } finally { + ProcessBuilder.deleteProcess(idSiteRef.get()); + ProcessBuilder.deleteProcess(idComRef.get()); + ProcessBuilder.deleteProcess(idComColRef.get()); + ProcessBuilder.deleteProcess(idComItemRef.get()); + ProcessBuilder.deleteProcess(idColRef.get()); + ProcessBuilder.deleteProcess(idColItemRef.get()); + ProcessBuilder.deleteProcess(idItemRef.get()); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java index f69c0e3af7..632b4e2f83 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java +++ 
b/dspace-server-webapp/src/test/java/org/dspace/scripts/MockDSpaceRunnableScriptConfiguration.java @@ -8,21 +8,13 @@ package org.dspace.scripts; import java.io.InputStream; -import java.sql.SQLException; import org.apache.commons.cli.Options; -import org.dspace.authorize.service.AuthorizeService; -import org.dspace.core.Context; import org.dspace.scripts.configuration.ScriptConfiguration; import org.dspace.scripts.impl.MockDSpaceRunnableScript; -import org.springframework.beans.factory.annotation.Autowired; public class MockDSpaceRunnableScriptConfiguration extends ScriptConfiguration { - - @Autowired - private AuthorizeService authorizeService; - private Class dspaceRunnableClass; @Override @@ -39,15 +31,6 @@ public class MockDSpaceRunnableScriptConfiguration Date: Fri, 5 May 2023 18:45:52 +0200 Subject: [PATCH 051/125] 100553: Added test for create metadata schema & field and created test for sort byFieldName --- .../MetadataFieldRestRepository.java | 19 ++- .../MetadataSchemaRestRepository.java | 6 +- .../rest/MetadataSchemaRestRepositoryIT.java | 23 +++- .../rest/MetadatafieldRestRepositoryIT.java | 114 +++++++++++++++++- 4 files changed, 149 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index c185e83342..eefd6331d1 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -14,6 +14,7 @@ import static org.dspace.app.rest.model.SearchConfigurationRest.Filter.OPERATOR_ import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Iterator; import java.util.List; import java.util.Objects; import javax.servlet.http.HttpServletRequest; @@ -45,10 +46,10 @@ import org.dspace.discovery.indexobject.MetadataFieldIndexFactoryImpl; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; - /** * This is the repository responsible to manage MetadataField Rest object * @@ -213,7 +214,13 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Sort.Order order = orderIterator.next(); + discoverQuery.setSortField(order.getProperty() + "_sort", + order.getDirection() == Sort.Direction.ASC ? 
DiscoverQuery.SORT_ORDER.asc : + DiscoverQuery.SORT_ORDER.desc); + } discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); return discoverQuery; @@ -254,13 +261,13 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -606,7 +672,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); Integer id = null; try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, null), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + null), nullValue()); id = read( getClient(authToken) @@ -641,7 +708,8 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration String authToken = getAuthToken(admin.getEmail(), password); AtomicReference idRef = new AtomicReference<>(); try { - assertThat(metadataFieldService.findByElement(context, metadataSchema, ELEMENT, QUALIFIER), nullValue()); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); getClient(authToken) .perform(post("/api/core/metadatafields") @@ -689,6 +757,46 @@ public class MetadatafieldRestRepositoryIT extends AbstractControllerIntegration .andExpect(status().isUnauthorized()); } + @Test + public void createUnprocessableEntity_elementContainingDots() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement("testElement.ForCreate"); + metadataFieldRest.setQualifier(QUALIFIER); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + + @Test + public void createUnprocessableEntity_qualifierContainingDots() throws Exception { + MetadataFieldRest metadataFieldRest = new MetadataFieldRest(); + metadataFieldRest.setElement(ELEMENT); + metadataFieldRest.setQualifier("testQualifier.ForCreate"); + metadataFieldRest.setScopeNote(SCOPE_NOTE); + + String authToken = getAuthToken(admin.getEmail(), password); + assertThat(metadataFieldService.findByElement(context, metadataSchema, metadataFieldRest.getElement(), + metadataFieldRest.getQualifier()), nullValue()); + + getClient(authToken) + .perform(post("/api/core/metadatafields") + .param("schemaId", String.valueOf(metadataSchema.getID())) + .param("projection", "full") + .content(new ObjectMapper().writeValueAsBytes(metadataFieldRest)) + .contentType(contentType)) + .andExpect(status().isUnprocessableEntity()); + } + @Test public void createUnauthorizedEPersonNoAdminRights() throws Exception 
{ From 2dfc373ad1c8e95146121e0c19f1e740879cb00b Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Sat, 29 Apr 2023 19:24:14 +0200 Subject: [PATCH 052/125] Fixed communities sub-communities and communities collections sorting --- .../CommunityCollectionLinkRepository.java | 10 +++ .../CommunitySubcommunityLinkRepository.java | 10 +++ .../CommunityCollectionLinkRepositoryIT.java | 82 +++++++++++++++++++ ...CommunitySubcommunityLinkRepositoryIT.java | 80 ++++++++++++++++++ 4 files changed, 182 insertions(+) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java index c77dcf18dc..3c728d8c31 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -31,6 +32,7 @@ import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -72,6 +74,14 @@ public class CommunityCollectionLinkRepository extends AbstractDSpaceRestReposit discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? 
DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCol : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java index c211810d11..135d964f3f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepository.java @@ -8,6 +8,7 @@ package org.dspace.app.rest.repository; import java.sql.SQLException; +import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.UUID; @@ -29,6 +30,7 @@ import org.dspace.discovery.indexobject.factory.IndexObjectFactoryFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; +import org.springframework.data.domain.Sort.Order; import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.stereotype.Component; @@ -68,6 +70,14 @@ public class CommunitySubcommunityLinkRepository extends AbstractDSpaceRestRepos discoverQuery.setStart(Math.toIntExact(pageable.getOffset())); discoverQuery.setMaxResults(pageable.getPageSize()); discoverQuery.setSortField("dc.title_sort", DiscoverQuery.SORT_ORDER.asc); + Iterator orderIterator = pageable.getSort().iterator(); + if (orderIterator.hasNext()) { + Order order = orderIterator.next(); + discoverQuery.setSortField( + order.getProperty() + "_sort", + order.getDirection().isAscending() ? 
DiscoverQuery.SORT_ORDER.asc : DiscoverQuery.SORT_ORDER.desc + ); + } DiscoverResult resp = searchService.search(context, scopeObject, discoverQuery); long tot = resp.getTotalSearchResults(); for (IndexableObject solrCommunities : resp.getIndexableObjects()) { diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java new file mode 100644 index 0000000000..24a94a4d4b --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunityCollectionLinkRepositoryIT.java @@ -0,0 +1,82 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CollectionMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CollectionBuilder; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Collection; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunityCollectionLinkRepository} + */ +public class CommunityCollectionLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Collection collection1; + Collection collection2; + Collection collection3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + collection1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + collection2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + collection3 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getCollections_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection1), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection3) + ))); + } + + @Test + public void getCollections_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/collections") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.collections", Matchers.contains( + CollectionMatcher.matchCollection(collection3), + CollectionMatcher.matchCollection(collection2), + CollectionMatcher.matchCollection(collection1) + ))); + } + +} 
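Note on the sorting change in this commit: CommunityCollectionLinkRepository, CommunitySubcommunityLinkRepository and the earlier MetadataFieldRestRepository hunk all translate the first Sort.Order of the incoming Pageable into a Discovery sort field by appending the "_sort" suffix. The sketch below shows that shared mapping in isolation; the helper class and method names are hypothetical and not part of this patch, only DiscoverQuery.setSortField, DiscoverQuery.SORT_ORDER and the "_sort" suffix convention are taken from the diffs themselves.

    import java.util.Iterator;

    import org.dspace.discovery.DiscoverQuery;
    import org.springframework.data.domain.Pageable;
    import org.springframework.data.domain.Sort;

    /**
     * Hypothetical helper (not part of this patch): applies the first Sort.Order
     * of a Pageable to a DiscoverQuery, using the "fieldName" + "_sort" Solr field
     * convention used by the repositories changed in this commit.
     */
    public final class PageableSortUtils {

        private PageableSortUtils() {
        }

        public static void applyFirstOrder(DiscoverQuery discoverQuery, Pageable pageable) {
            Iterator<Sort.Order> orderIterator = pageable.getSort().iterator();
            if (orderIterator.hasNext()) {
                Sort.Order order = orderIterator.next();
                // Discovery sorts on the dedicated Solr sort field, e.g. "dc.title_sort"
                discoverQuery.setSortField(order.getProperty() + "_sort",
                        order.getDirection().isAscending()
                                ? DiscoverQuery.SORT_ORDER.asc
                                : DiscoverQuery.SORT_ORDER.desc);
            }
        }
    }

If this logic keeps being duplicated, factoring it into a shared utility like the one sketched above would keep the repositories consistent should the suffix convention or the fallback sort ever change.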
diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java new file mode 100644 index 0000000000..aa3b1c0721 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/repository/CommunitySubcommunityLinkRepositoryIT.java @@ -0,0 +1,80 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.repository; + +import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath; +import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status; + +import org.dspace.app.rest.matcher.CommunityMatcher; +import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.builder.CommunityBuilder; +import org.dspace.content.Community; +import org.hamcrest.Matchers; +import org.junit.Before; +import org.junit.Test; + +/** + * Test class for {@link CommunitySubcommunityLinkRepository} + */ +public class CommunitySubcommunityLinkRepositoryIT extends AbstractControllerIntegrationTest { + + Community parentCommunity; + Community subCommunity1; + Community subCommunity2; + Community subCommunity3; + + @Before + @Override + public void setUp() throws Exception { + super.setUp(); + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .build(); + subCommunity1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 1") + .build(); + subCommunity2 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 2") + .build(); + subCommunity3 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub community 3") + .build(); + context.commit(); + context.restoreAuthSystemState(); + } + + @Test + public void getSubCommunities_sortTitleASC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,ASC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity1), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity3) + ))); + } + + @Test + public void getSubCommunities_sortTitleDESC() throws Exception { + String adminToken = getAuthToken(admin.getEmail(), password); + + getClient(adminToken).perform(get("/api/core/communities/" + parentCommunity.getID() + "/subcommunities") + .param("sort", "dc.title,DESC")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.subcommunities", Matchers.contains( + CommunityMatcher.matchCommunity(subCommunity3), + CommunityMatcher.matchCommunity(subCommunity2), + CommunityMatcher.matchCommunity(subCommunity1) + ))); + } + +} From 0783f7ff8285a9f0701a1ae125f5d85d348b23f4 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 08:37:55 +0200 Subject: [PATCH 053/125] DS-8636 - all files option fixed --- .../dspace/app/requestitem/RequestItemEmailNotifier.java | 9 ++++++--- 1 file changed, 6 insertions(+), 3 
deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 61b42fd185..4171744c5e 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,9 +167,12 @@ public class RequestItemEmailNotifier { if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), - bitstream.getFormat(context).getMIMEType()); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment(bitstreamService.retrieve(context, + bitstream), bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } From 3c37dd45193304ffccac926cfa6a05f714610b40 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 08:44:49 +0200 Subject: [PATCH 054/125] ds-8636 - indentation --- .../app/requestitem/RequestItemEmailNotifier.java | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index 4171744c5e..c804ac71d9 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,12 +167,12 @@ public class RequestItemEmailNotifier { if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace - context.turnOffAuthorisationSystem(); - email.addAttachment(bitstreamService.retrieve(context, - bitstream), bitstream.getName(), - bitstream.getFormat(context).getMIMEType()); - context.restoreAuthSystemState(); + // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + context.turnOffAuthorisationSystem(); + email.addAttachment(bitstreamService.retrieve(context, + bitstream), bitstream.getName(), + bitstream.getFormat(context).getMIMEType()); + context.restoreAuthSystemState(); } } } From 909003bfac9945d100f1d9b4a8d4be5881088376 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 09:01:05 +0200 Subject: [PATCH 055/125] Fix style --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index c804ac71d9..caefb4bf40 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,7 +167,8 @@ public class RequestItemEmailNotifier { if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can respond to the request without authenticating into DSpace + // #8636 Anyone receiving the email can respond to the + // request without authenticating into 
DSpace context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, bitstream), bitstream.getName(), From 6b10fc3783055bba4fbd291726d574752b22aca3 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Mon, 8 May 2023 09:15:05 +0200 Subject: [PATCH 056/125] Fix Style --- .../org/dspace/app/requestitem/RequestItemEmailNotifier.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java index caefb4bf40..8d19597547 100644 --- a/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java +++ b/dspace-api/src/main/java/org/dspace/app/requestitem/RequestItemEmailNotifier.java @@ -167,7 +167,7 @@ public class RequestItemEmailNotifier { if (!bitstream.getFormat(context).isInternal() && requestItemService.isRestricted(context, bitstream)) { - // #8636 Anyone receiving the email can respond to the + // #8636 Anyone receiving the email can respond to the // request without authenticating into DSpace context.turnOffAuthorisationSystem(); email.addAttachment(bitstreamService.retrieve(context, From ccf9bc2d98fae70308cdfc9cb8f0978701c420ec Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Tue, 9 May 2023 17:20:44 +0100 Subject: [PATCH 057/125] Update submission-forms.xml --- dspace/config/submission-forms.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace/config/submission-forms.xml b/dspace/config/submission-forms.xml index 8aee946d84..39a4778356 100644 --- a/dspace/config/submission-forms.xml +++ b/dspace/config/submission-forms.xml @@ -910,9 +910,9 @@ - dc - dcterms - references + dcterms + references + false onebox From 0528b4dd30876c790bd81eda12e7dcb36ac9f602 Mon Sep 17 00:00:00 2001 From: Kristof De Langhe Date: Thu, 11 May 2023 11:16:16 +0200 Subject: [PATCH 058/125] 100414: Missing search result statistics code cleanup + extra test --- .../rest/converter/SearchEventConverter.java | 31 ++++++++-- .../app/rest/model/SearchEventRest.java | 10 +-- .../app/rest/utils/DSpaceObjectResolver.java | 61 ------------------- .../app/rest/SearchEventRestRepositoryIT.java | 61 ++++++++++++++++++- 4 files changed, 88 insertions(+), 75 deletions(-) delete mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java index a8203e272f..126d37ba1a 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/SearchEventConverter.java @@ -7,16 +7,20 @@ */ package org.dspace.app.rest.converter; +import java.sql.SQLException; +import java.util.Arrays; import java.util.LinkedList; import java.util.List; import javax.servlet.http.HttpServletRequest; +import org.apache.log4j.Logger; import org.dspace.app.rest.model.PageRest; import org.dspace.app.rest.model.SearchEventRest; import org.dspace.app.rest.model.SearchResultsRest; -import org.dspace.app.rest.utils.DSpaceObjectResolver; import org.dspace.app.rest.utils.ScopeResolver; +import org.dspace.app.util.service.DSpaceObjectUtils; import org.dspace.content.DSpaceObject; +import org.dspace.core.Constants; import org.dspace.core.Context; import 
org.dspace.discovery.IndexableObject; import org.dspace.usage.UsageEvent; @@ -26,22 +30,37 @@ import org.springframework.stereotype.Component; @Component public class SearchEventConverter { + /* Log4j logger */ + private static final Logger log = Logger.getLogger(SearchEventConverter.class); @Autowired private ScopeResolver scopeResolver; @Autowired - private DSpaceObjectResolver dSpaceObjectResolver; + private DSpaceObjectUtils dSpaceObjectUtils; + + private final Integer[] allowedClickedObjectTypes = + new Integer[]{Constants.COMMUNITY, Constants.COLLECTION, Constants.ITEM}; public UsageSearchEvent convert(Context context, HttpServletRequest request, SearchEventRest searchEventRest) { UsageSearchEvent usageSearchEvent = new UsageSearchEvent(UsageEvent.Action.SEARCH, request, context, null); usageSearchEvent.setQuery(searchEventRest.getQuery()); usageSearchEvent.setDsoType(searchEventRest.getDsoType()); - if (searchEventRest.getObject() != null) { - IndexableObject object = dSpaceObjectResolver.resolveObject(context, searchEventRest.getObject()); - if (object != null && object.getIndexedObject() instanceof DSpaceObject) { - usageSearchEvent.setObject((DSpaceObject) object.getIndexedObject()); + if (searchEventRest.getClickedObject() != null) { + try { + DSpaceObject clickedObject = + dSpaceObjectUtils.findDSpaceObject(context, searchEventRest.getClickedObject()); + if (clickedObject != null && + Arrays.asList(allowedClickedObjectTypes).contains(clickedObject.getType())) { + usageSearchEvent.setObject(clickedObject); + } else { + throw new IllegalArgumentException("UUID " + searchEventRest.getClickedObject() + + " was expected to resolve to a Community, Collection or Item, but didn't resolve to any"); + } + } catch (SQLException e) { + log.warn("Unable to retrieve DSpace Object with ID " + searchEventRest.getClickedObject() + + " from the database", e); } } if (searchEventRest.getScope() != null) { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java index 637acb9bfd..46827711f2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchEventRest.java @@ -25,7 +25,7 @@ public class SearchEventRest extends BaseObjectRest { private UUID scope; private String configuration; private String dsoType; - private UUID object; + private UUID clickedObject; private List appliedFilters; private SearchResultsRest.Sorting sort; private PageRest page; @@ -99,11 +99,11 @@ public class SearchEventRest extends BaseObjectRest { this.dsoType = dsoType; } - public UUID getObject() { - return object; + public UUID getClickedObject() { + return clickedObject; } - public void setObject(UUID object) { - this.object = object; + public void setClickedObject(UUID clickedObject) { + this.clickedObject = clickedObject; } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java deleted file mode 100644 index 7ded06bdf5..0000000000 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/utils/DSpaceObjectResolver.java +++ /dev/null @@ -1,61 +0,0 @@ -/** - * The contents of this file are subject to the license and copyright - * detailed in the LICENSE and NOTICE files at the root of the source - * tree and available online at - * - * 
http://www.dspace.org/license/ - */ -package org.dspace.app.rest.utils; - -import java.sql.SQLException; -import java.util.UUID; - -import org.apache.log4j.Logger; -import org.dspace.content.service.CollectionService; -import org.dspace.content.service.CommunityService; -import org.dspace.content.service.ItemService; -import org.dspace.core.Context; -import org.dspace.discovery.IndexableObject; -import org.dspace.discovery.indexobject.IndexableCollection; -import org.dspace.discovery.indexobject.IndexableCommunity; -import org.dspace.discovery.indexobject.IndexableItem; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.stereotype.Component; - -@Component -public class DSpaceObjectResolver { - /* Log4j logger */ - private static final Logger log = Logger.getLogger(DSpaceObjectResolver.class); - - @Autowired - ItemService itemService; - - @Autowired - CollectionService collectionService; - - @Autowired - CommunityService communityService; - - public IndexableObject resolveObject(Context context, UUID uuid) { - IndexableObject object = null; - if (uuid != null) { - try { - object = new IndexableCommunity(communityService.find(context, uuid)); - if (object.getIndexedObject() == null) { - object = new IndexableCollection(collectionService.find(context, uuid)); - } - if (object.getIndexedObject() == null) { - object = new IndexableItem(itemService.find(context, uuid)); - } - if (object.getIndexedObject() == null) { - throw new IllegalArgumentException("UUID " + uuid + " is expected to resolve to a Community, " + - "Collection or Item, but didn't resolve to any"); - } - } catch (SQLException e) { - log.warn("Unable to retrieve DSpace Object with ID " + uuid + " from the database", e); - } - } - return object; - } - -} diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java index 63ca4b735d..978d8feb58 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/SearchEventRestRepositoryIT.java @@ -413,7 +413,7 @@ public class SearchEventRestRepositoryIT extends AbstractControllerIntegrationTe } @Test - public void postTestWithObjectSuccess() throws Exception { + public void postTestWithClickedObjectSuccess() throws Exception { context.turnOffAuthorisationSystem(); @@ -441,10 +441,10 @@ public class SearchEventRestRepositoryIT extends AbstractControllerIntegrationTe SearchEventRest searchEventRest = new SearchEventRest(); searchEventRest.setQuery("test"); - searchEventRest.setScope(publicItem1.getID()); + searchEventRest.setScope(col1.getID()); searchEventRest.setConfiguration("default"); searchEventRest.setDsoType("item"); - searchEventRest.setObject(publicItem1.getID()); + searchEventRest.setClickedObject(publicItem1.getID()); SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); searchEventRest.setSort(sort); @@ -466,4 +466,59 @@ public class SearchEventRestRepositoryIT extends AbstractControllerIntegrationTe .andExpect(status().isCreated()); } + + @Test + public void postTestWithClickedObjectNotExisting() throws Exception { + + context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community with sub-community and two collections. 
+ parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Community child1 = CommunityBuilder.createSubCommunity(context, parentCommunity) + .withName("Sub Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, child1).withName("Collection 1").build(); + Collection col2 = CollectionBuilder.createCollection(context, child1).withName("Collection 2").build(); + + //2. Three public items that are readable by Anonymous with different subjects + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .withSubject("ExtraEntry") + .build(); + + context.restoreAuthSystemState(); + + SearchEventRest searchEventRest = new SearchEventRest(); + + searchEventRest.setQuery("test"); + searchEventRest.setScope(col1.getID()); + searchEventRest.setConfiguration("default"); + searchEventRest.setDsoType("item"); + searchEventRest.setClickedObject(UUID.randomUUID()); + + SearchResultsRest.Sorting sort = new SearchResultsRest.Sorting("title", "desc"); + searchEventRest.setSort(sort); + + PageRest pageRest = new PageRest(5, 20, 4, 1); + searchEventRest.setPage(pageRest); + + SearchResultsRest.AppliedFilter appliedFilter = + new SearchResultsRest.AppliedFilter("author", "contains", "test","test"); + List appliedFilterList = new LinkedList<>(); + appliedFilterList.add(appliedFilter); + searchEventRest.setAppliedFilters(appliedFilterList); + + ObjectMapper mapper = new ObjectMapper(); + + getClient().perform(post("/api/statistics/searchevents") + .content(mapper.writeValueAsBytes(searchEventRest)) + .contentType(contentType)) + .andExpect(status().isBadRequest()); + + } } From c670251a68433cfaecfa73f65665b9892aef22fe Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Thu, 11 May 2023 17:13:13 +0200 Subject: [PATCH 059/125] 94299: Fix minor issues --- .../rest/BitstreamCategoryRestController.java | 1 - .../operation/BitstreamRemoveOperation.java | 15 +- .../app/rest/BitstreamRestRepositoryIT.java | 299 ++++++++++++++++++ 3 files changed, 313 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java index 13929e5a9a..6d970eb109 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -51,7 +51,6 @@ public class BitstreamCategoryRestController { * @throws SQLException if an error occurs while accessing the database. * @throws AuthorizeException if the user is not authorized to perform the requested operation. 
*/ - @PreAuthorize("hasAuthority('ADMIN')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> patch(HttpServletRequest request, @RequestBody(required = true) JsonNode jsonNode) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java index 93c495a302..7733600271 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -14,10 +14,13 @@ import java.util.UUID; import org.dspace.app.rest.exception.RESTBitstreamNotFoundException; import org.dspace.app.rest.model.patch.Operation; import org.dspace.authorize.AuthorizeException; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.content.Bitstream; import org.dspace.content.service.BitstreamService; +import org.dspace.core.Constants; import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.access.AccessDeniedException; import org.springframework.stereotype.Component; /** @@ -38,6 +41,8 @@ import org.springframework.stereotype.Component; public class BitstreamRemoveOperation extends PatchOperation { @Autowired BitstreamService bitstreamService; + @Autowired + AuthorizeService authorizeService; public static final String OPERATION_PATH_BITSTREAM_REMOVE = "/bitstreams/"; @Override @@ -47,10 +52,10 @@ public class BitstreamRemoveOperation extends PatchOperation { if (bitstreamToDelete == null) { throw new RESTBitstreamNotFoundException(bitstreamIDtoDelete); } + authorizeBitstreamRemoveAction(context, bitstreamToDelete, Constants.DELETE); try { bitstreamService.delete(context, bitstreamToDelete); - bitstreamService.update(context, bitstreamToDelete); } catch (AuthorizeException | IOException e) { throw new RuntimeException(e.getMessage(), e); } @@ -62,4 +67,12 @@ public class BitstreamRemoveOperation extends PatchOperation { return objectToMatch == null && operation.getOp().trim().equalsIgnoreCase(OPERATION_REMOVE) && operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); } + + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) throws SQLException { + try { + authorizeService.authorizeAction(context, bitstream, operation); + } catch (AuthorizeException e) { + throw new AccessDeniedException("The current user is not allowed to remove the bitstream", e); + } + } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 3b01b4eac2..2d855a06c2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -46,6 +46,7 @@ import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.BundleBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Bitstream; @@ -56,6 +57,8 @@ import org.dspace.content.Community; import org.dspace.content.Item; import 
org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; +import org.dspace.content.service.CommunityService; import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.eperson.EPerson; @@ -86,6 +89,12 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest @Autowired private ItemService itemService; + @Autowired + CollectionService collectionService; + + @Autowired + CommunityService communityService; + @Test public void findAllTest() throws Exception { //We turn off the authorization system in order to create the structure as defined below @@ -2490,6 +2499,296 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); } + @Test + public void deleteBitstreamsInBulk_Unauthorized() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(admin.getEmail(), password); + + Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); + + getClient().perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isUnauthorized()); + } + + @Test + public void deleteBitstreamsInBulk_Forbidden() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection collection = CollectionBuilder.createCollection(context, parentCommunity) + 
.withName("Collection") + .build(); + Item publicItem1 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, collection) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add all three bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + String token = getAuthToken(eperson.getEmail(), password); + + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + } + + @Test + public void deleteBitstreamsInBulk_collectionAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson col1Admin = EPersonBuilder.createEPerson(context) + .withEmail("col1admin@test.com") + .withPassword(password) + .build(); + EPerson col2Admin = EPersonBuilder.createEPerson(context) + .withEmail("col2admin@test.com") + .withPassword(password) + .build(); + Group col1_AdminGroup = collectionService.createAdministrators(context, col1); + Group col2_AdminGroup = collectionService.createAdministrators(context, col2); + groupService.addMember(context, col1_AdminGroup, col1Admin); + groupService.addMember(context, col2_AdminGroup, col2Admin); + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 =
BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(col1Admin.getEmail(), password); + // Should return forbidden since one of the bitstreams does not originate from collection 1 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(2); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + + // Change the token to the admin of collection 2 + token = getAuthToken(col2Admin.getEmail(), password); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + ops = new ArrayList<>(); + removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp1); + removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp2); + removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream4.getID()); + ops.add(removeOp3); + patchBody = getPatchContent(ops); + + // Should return forbidden since one of the bitstreams does not originate from collection 2 + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isForbidden()); + // Remove the bitstream that does not originate from the collection we are administrator of, should return OK + ops.remove(0); + patchBody = getPatchContent(ops); + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + + @Test + public void deleteBitstreamsInBulk_communityAdmin() throws Exception { + context.turnOffAuthorisationSystem(); + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 1") + .build(); + Collection col2 = CollectionBuilder.createCollection(context, parentCommunity) + .withName("Collection 2") + .build(); + EPerson parentCommunityAdmin = EPersonBuilder.createEPerson(context) + .withEmail("parentComAdmin@test.com") + .withPassword(password) + .build(); + Group parentComAdminGroup = communityService.createAdministrators(context, parentCommunity); + groupService.addMember(context, parentComAdminGroup, parentCommunityAdmin); + Item publicItem1 =
ItemBuilder.createItem(context, col1) + .withTitle("Test item 1") + .build(); + Item publicItem2 = ItemBuilder.createItem(context, col2) + .withTitle("Test item 2") + .build(); + + String bitstreamContent = "This is an archived bitstream"; + Bitstream bitstream1 = null; + Bitstream bitstream2 = null; + Bitstream bitstream3 = null; + Bitstream bitstream4 = null; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream1 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 1") + .withMimeType("text/plain") + .build(); + bitstream2 = BitstreamBuilder.createBitstream(context, publicItem1, is) + .withName("Bitstream 2") + .withMimeType("text/plain") + .build(); + bitstream3 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 3") + .withMimeType("text/plain") + .build(); + bitstream4 = BitstreamBuilder.createBitstream(context, publicItem2, is) + .withName("Bitstream 4") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + + // Add three out of four bitstreams to the list of bitstreams to be deleted + List ops = new ArrayList<>(); + RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); + ops.add(removeOp1); + RemoveOperation removeOp2 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream2.getID()); + ops.add(removeOp2); + RemoveOperation removeOp3 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream3.getID()); + ops.add(removeOp3); + String patchBody = getPatchContent(ops); + + String token = getAuthToken(parentCommunityAdmin.getEmail(), password); + // Bitstreams originate from two different collections, but those collections live in the same community, so + // a community admin should be able to delete them + getClient(token).perform(patch("/api/core/bitstreams") + .content(patchBody) + .contentType(MediaType.APPLICATION_JSON_PATCH_JSON)) + .andExpect(status().isNoContent()); + } + public boolean bitstreamExists(String token, Bitstream ...bitstreams) throws Exception { for (Bitstream bitstream : bitstreams) { if (getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID())) From 7c7824f913c5db9cffafb5afed8394cc441d5c8c Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 11 May 2023 17:57:45 +0200 Subject: [PATCH 060/125] Implement community feedback --- .../DiscoveryConfigurationService.java | 3 +- .../DiscoveryScopeBasedRestControllerIT.java | 52 +++++++++++++++++++ 2 files changed, 54 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java index da23b87a35..1a1ed95a29 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoveryConfigurationService.java @@ -13,6 +13,7 @@ import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; +import java.util.concurrent.ConcurrentHashMap; import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.LogManager; @@ -42,7 +43,7 @@ public class DiscoveryConfigurationService { * own configuration, we take the one of the first parent that does. * This cache ensures we do not have to go up the hierarchy every time. 
*/ - private final Map comColToDiscoveryConfigurationMap = new HashMap<>(); + private final Map comColToDiscoveryConfigurationMap = new ConcurrentHashMap<>(); public Map getMap() { return map; diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java index 0c8735545e..a3408a7736 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryScopeBasedRestControllerIT.java @@ -58,6 +58,9 @@ import org.springframework.beans.factory.annotation.Autowired; * * The tests will verify that for each object, the correct facets are provided and that all the necessary fields to * power these facets are indexed properly. + * + * This file requires the discovery configuration in the following test file: + * src/test/data/dspaceFolder/config/spring/api/test-discovery.xml */ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerIntegrationTest { @@ -263,6 +266,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-parent-community-1" is correctly used for Parent Community 1. + */ public void ScopeBasedIndexingAndSearchTestParentCommunity1() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity1.getID()))) @@ -301,6 +307,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-sub-community-1-1" is correctly used for Subcommunity 11. + */ public void ScopeBasedIndexingAndSearchTestSubCommunity11() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity11.getID()))) @@ -330,6 +339,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-collection-1-1-1" is correctly used for Collection 111. + */ public void ScopeBasedIndexingAndSearchTestCollection111() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection111.getID()))) @@ -357,6 +369,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-1-1" is inherited + * correctly for Collection 112. + */ public void ScopeBasedIndexingAndSearchTestCollection112() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection112.getID()))) @@ -382,6 +398,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Subcommunity 12. + */ public void ScopeBasedIndexingAndSearchTestSubcommunity12() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity12.getID()))) @@ -411,6 +431,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-collection-1-2-1" is correctly used for Collection 121. 
+ */ public void ScopeBasedIndexingAndSearchTestCollection121() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection121.getID()))) @@ -436,6 +459,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-parent-community-1" is inherited + * correctly for Collection 122. + */ public void ScopeBasedIndexingAndSearchTestCollection122() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection122.getID()))) @@ -463,6 +490,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Parent Community 2. + */ public void ScopeBasedIndexingAndSearchTestParentCommunity2() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(parentCommunity2.getID()))) @@ -481,6 +512,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-sub-community-2-1" is correctly used for Subcommunity 21. + */ public void ScopeBasedIndexingAndSearchTestSubCommunity21() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity21.getID()))) @@ -510,6 +544,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-collection-2-1-1" is correctly used for Collection 211. + */ public void ScopeBasedIndexingAndSearchTestCollection211() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection211.getID()))) @@ -537,6 +574,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the first encountered custom parent configuration "discovery-sub-community-2-1" is inherited + * correctly for Collection 212. + */ public void ScopeBasedIndexingAndSearchTestCollection212() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection212.getID()))) @@ -562,6 +603,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Subcommunity 22. + */ public void ScopeBasedIndexingAndSearchTestSubcommunity22() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(subcommunity22.getID()))) @@ -579,6 +624,9 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the custom configuration "discovery-collection-2-2-1" is correctly used for Collection 221. + */ public void ScopeBasedIndexingAndSearchTestCollection221() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection221.getID()))) @@ -604,6 +652,10 @@ public class DiscoveryScopeBasedRestControllerIT extends AbstractControllerInteg } @Test + /** + * Verify that the default configuration is inherited correctly when no other custom configuration can be inherited + * for Collection 222. 
+ */ public void ScopeBasedIndexingAndSearchTestCollection222() throws Exception { getClient().perform(get("/api/discover/facets").param("scope", String.valueOf(collection222.getID()))) From 78fba6b579c3af233c80861c6efb90491cb8d925 Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Thu, 11 May 2023 21:24:31 +0200 Subject: [PATCH 061/125] 94299: checkstyle --- .../org/dspace/app/rest/BitstreamCategoryRestController.java | 1 - .../repository/patch/operation/BitstreamRemoveOperation.java | 3 ++- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java index 6d970eb109..aa511bcb92 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/BitstreamCategoryRestController.java @@ -20,7 +20,6 @@ import org.dspace.core.Context; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.ResponseEntity; -import org.springframework.security.access.prepost.PreAuthorize; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java index 7733600271..b0e2a45c9d 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/patch/operation/BitstreamRemoveOperation.java @@ -68,7 +68,8 @@ public class BitstreamRemoveOperation extends PatchOperation { operation.getPath().trim().startsWith(OPERATION_PATH_BITSTREAM_REMOVE); } - public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) throws SQLException { + public void authorizeBitstreamRemoveAction(Context context, Bitstream bitstream, int operation) + throws SQLException { try { authorizeService.authorizeAction(context, bitstream, operation); } catch (AuthorizeException e) { From 2cf5b290b624efca51c3d2f35f1257bc828bfaba Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ad=C3=A1n=20Rom=C3=A1n=20Ruiz?= <116575333+aroman-arvo@users.noreply.github.com> Date: Thu, 11 May 2023 23:59:38 +0200 Subject: [PATCH 062/125] Fix "Simultaneous deletion of multiple bitstreams from the same bundle often compromises the state of the bundle" (#8778) * DS-8694 * clean imports * cleaning code * DS-8694 - re-deletion of bitstream throw exception * ds-8694 - clean code --- .../dspace/app/rest/repository/DSpaceRestRepository.java | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java index 01f127eca5..a93f5e55dc 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/DSpaceRestRepository.java @@ -195,7 +195,11 @@ public abstract class DSpaceRestRepository Date: Mon, 
17 Apr 2023 14:40:43 +1200 Subject: [PATCH 063/125] Bitstreams should inherit DEFAULT_BITSTREAM_READ, then DEFAULT_ITEM_READ When generating automatic policies, bitstream DSOs have always inherited from DEFAULT_ITEM_READ, even if the collection's DEFAULT_BITSTREAM_READ policy differs. This bugfix checks DEFAULT_BITSTREAM_READ first, and uses DEFAULT_ITEM_READ as a fallback. --- .../org/dspace/authorize/AuthorizeServiceImpl.java | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index 34543c078a..e8e639a0b2 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -674,8 +674,17 @@ public class AuthorizeServiceImpl implements AuthorizeService { throws SQLException, AuthorizeException { if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - - List authorizedGroups = getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_BITSTREAM_READ); + // Get DEFAULT_ITEM_READ policy from the collection + List defaultItemReadGroups = + getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); + // By default, use DEFAULT_BITSTREAM_READ. Otherwise, use DEFAULT_ITEM_READ + List authorizedGroups = defaultBitstreamReadGroups; + if (defaultBitstreamReadGroups.isEmpty()) { + authorizedGroups = defaultItemReadGroups; + } removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); From 019d0319dc8174611f5a37b9b1c4960baf2612f2 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:01:14 +1200 Subject: [PATCH 064/125] #8783 New bitstreams inherit DEFAULT_BITSTREAM_READ from owning collection Otherwise will keep inherited READ from bundle. Does not affect embargos set during submission or workflow, these will always be applied afterwards. --- .../org/dspace/content/BundleServiceImpl.java | 23 +++++++ .../app/rest/BitstreamRestControllerIT.java | 60 ++++++++++++++++++- 2 files changed, 81 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 485f1d6451..01bee702bb 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -34,6 +34,7 @@ import org.dspace.content.service.ItemService; import org.dspace.core.Constants; import org.dspace.core.Context; import org.dspace.core.LogHelper; +import org.dspace.eperson.Group; import org.dspace.event.Event; import org.springframework.beans.factory.annotation.Autowired; @@ -173,6 +174,28 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... 
authorizeService.inheritPolicies(context, bundle, bitstream); + if (owningItem != null) { + // Resolve owning collection + Collection owningCollection = owningItem.getOwningCollection(); + if (owningCollection != null) { + // Get DEFAULT_BITSTREAM_READ policy from the collection + List defaultBitstreamReadGroups = + authorizeService.getAuthorizedGroups(context, owningCollection, + Constants.DEFAULT_BITSTREAM_READ); + log.info(defaultBitstreamReadGroups.size()); + // If this collection is configured with a DEFAULT_BITSTREAM_READ group, overwrite the READ policy + // inherited from the bundle with this policy. + if (!defaultBitstreamReadGroups.isEmpty()) { + // Remove read policies from the bitstream + authorizeService.removePoliciesActionFilter(context, bitstream, Constants.READ); + for (Group defaultBitstreamReadGroup : defaultBitstreamReadGroups) { + // Inherit this policy as READ, directly from the collection roles + authorizeService.addPolicy(context, bitstream, + Constants.READ, defaultBitstreamReadGroup, ResourcePolicy.TYPE_INHERITED); + } + } + } + } bitstreamService.update(context, bitstream); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 09dbdca505..79b7bd22c1 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,8 +17,7 @@ import static org.dspace.builder.BitstreamFormatBuilder.createBitstreamFormat; import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; -import static org.dspace.core.Constants.READ; -import static org.dspace.core.Constants.WRITE; +import static org.dspace.core.Constants.*; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; @@ -56,6 +55,8 @@ import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; +import org.dspace.authorize.ResourcePolicy; +import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; import org.dspace.builder.CollectionBuilder; @@ -70,6 +71,7 @@ import org.dspace.content.Community; import org.dspace.content.Item; import org.dspace.content.service.BitstreamFormatService; import org.dspace.content.service.BitstreamService; +import org.dspace.content.service.CollectionService; import org.dspace.core.Constants; import org.dspace.disseminate.CitationDocumentServiceImpl; import org.dspace.eperson.EPerson; @@ -112,6 +114,12 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest @Autowired private BitstreamFormatService bitstreamFormatService; + @Autowired + private AuthorizeService authorizeService; + + @Autowired + private CollectionService collectionService; + private Bitstream bitstream; private BitstreamFormat supportedFormat; private BitstreamFormat knownFormat; @@ -626,6 +634,54 @@ public class BitstreamRestControllerIT extends AbstractControllerIntegrationTest } + @Test + public void testBitstreamDefaultReadInheritanceFromCollection() throws Exception { + 
context.turnOffAuthorisationSystem(); + + //** GIVEN ** + //1. A community-collection structure with one parent community and one collection. + parentCommunity = CommunityBuilder.createCommunity(context) + .withName("Parent Community") + .build(); + + Group internalGroup = GroupBuilder.createGroup(context) + .withName("Internal Group") + .build(); + // Explicitly create a restrictive default bitstream read policy on the collection + Collection col1 = CollectionBuilder.createCollection(context, parentCommunity).withName("Collection 1").build(); + authorizeService.removePoliciesActionFilter(context, col1, DEFAULT_BITSTREAM_READ); + authorizeService.addPolicy(context, col1, DEFAULT_BITSTREAM_READ, internalGroup); + + //2. A public item with a new bitstream that is not explicitly restricted + // but should instead inherit + Item publicItem1 = ItemBuilder.createItem(context, col1) + .withTitle("Public item 1") + .withIssueDate("2017-10-17") + .withAuthor("Smith, Donald").withAuthor("Doe, John") + .build(); + // make sure this item has no default policies for a new bundle to inherit + authorizeService.removePoliciesActionFilter(context, publicItem1, DEFAULT_BITSTREAM_READ); + + String bitstreamContent = "Private!"; + try (InputStream is = IOUtils.toInputStream(bitstreamContent, CharEncoding.UTF_8)) { + bitstream = BitstreamBuilder + .createBitstream(context, publicItem1, is) + .withName("Test Restricted Bitstream") + .withDescription("This bitstream is restricted") + .withMimeType("text/plain") + .build(); + } + context.restoreAuthSystemState(); + //** WHEN ** + //We download the bitstream + getClient().perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + //** THEN ** + .andExpect(status().isUnauthorized()); + + //An unauthorized request should not log statistics + checkNumberOfStatsRecords(bitstream, 0); + } + @Test public void restrictedGroupBitstreamForbiddenTest() throws Exception { context.turnOffAuthorisationSystem(); From 4a88573dc1462080e92735b21f3729e7281c766b Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:18:08 +1200 Subject: [PATCH 065/125] #8783 Strip out unused generateAutomaticPolicies method from AuthorizeService --- .../authorize/AuthorizeServiceImpl.java | 63 ------------------- .../authorize/service/AuthorizeService.java | 20 +----- 2 files changed, 1 insertion(+), 82 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java index e8e639a0b2..8058caf669 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/authorize/AuthorizeServiceImpl.java @@ -654,69 +654,6 @@ public class AuthorizeServiceImpl implements AuthorizeService { } } - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. - * - * @param context The relevant DSpace Context.
- * @param embargoDate embargo end date - * @param reason embargo reason - * @param dso DSpace object - * @param owningCollection collection to get group policies from - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - @Override - public void generateAutomaticPolicies(Context context, Date embargoDate, - String reason, DSpaceObject dso, Collection owningCollection) - throws SQLException, AuthorizeException { - - if (embargoDate != null || (embargoDate == null && dso instanceof Bitstream)) { - // Get DEFAULT_BITSTREAM_READ policy from the collection - List defaultBitstreamReadGroups = - getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_BITSTREAM_READ); - // Get DEFAULT_ITEM_READ policy from the collection - List defaultItemReadGroups = - getAuthorizedGroups(context, owningCollection, Constants.DEFAULT_ITEM_READ); - // By default, use DEFAULT_BITSTREAM_READ. Otherwise, use DEFAULT_ITEM_READ - List authorizedGroups = defaultBitstreamReadGroups; - if (defaultBitstreamReadGroups.isEmpty()) { - authorizedGroups = defaultItemReadGroups; - } - - removeAllPoliciesByDSOAndType(context, dso, ResourcePolicy.TYPE_CUSTOM); - - // look for anonymous - boolean isAnonymousInPlace = false; - for (Group g : authorizedGroups) { - if (StringUtils.equals(g.getName(), Group.ANONYMOUS)) { - isAnonymousInPlace = true; - } - } - if (!isAnonymousInPlace) { - // add policies for all the groups - for (Group g : authorizedGroups) { - ResourcePolicy rp = createOrModifyPolicy(null, context, null, g, null, embargoDate, Constants.READ, - reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - - } else { - // add policy just for anonymous - ResourcePolicy rp = createOrModifyPolicy(null, context, null, - groupService.findByName(context, Group.ANONYMOUS), null, - embargoDate, Constants.READ, reason, dso); - if (rp != null) { - resourcePolicyService.update(context, rp); - } - } - } - } - @Override public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException { diff --git a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java index 36679f94c6..14a7ff5c82 100644 --- a/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java +++ b/dspace-api/src/main/java/org/dspace/authorize/service/AuthorizeService.java @@ -470,24 +470,6 @@ public interface AuthorizeService { public ResourcePolicy findByTypeGroupAction(Context c, DSpaceObject dso, Group group, int action) throws SQLException; - - /** - * Generate Policies policies READ for the date in input adding reason. New policies are assigned automatically - * at the groups that - * have right on the collection. E.g., if the anonymous can access the collection policies are assigned to - * anonymous. 
- * - * @param context current context - * @param embargoDate date - * @param reason reason - * @param dso DSpaceObject - * @param owningCollection collection - * @throws SQLException if database error - * @throws AuthorizeException if authorization error - */ - public void generateAutomaticPolicies(Context context, Date embargoDate, String reason, DSpaceObject dso, - Collection owningCollection) throws SQLException, AuthorizeException; - public ResourcePolicy createResourcePolicy(Context context, DSpaceObject dso, Group group, EPerson eperson, int type, String rpType) throws SQLException, AuthorizeException; @@ -603,7 +585,7 @@ public interface AuthorizeService { /** * Replace all the policies in the target object with exactly the same policies that exist in the source object - * + * * @param context DSpace Context * @param source source of policies * @param dest destination of inherited policies From c2575b05b2f957adc7e2096f625c7d50264bf657 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 14:19:53 +1200 Subject: [PATCH 066/125] #8783 checkstyle fixes for integration test --- .../java/org/dspace/app/rest/BitstreamRestControllerIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java index 79b7bd22c1..7a4aa0c007 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestControllerIT.java @@ -17,7 +17,9 @@ import static org.dspace.builder.BitstreamFormatBuilder.createBitstreamFormat; import static org.dspace.builder.ResourcePolicyBuilder.createResourcePolicy; import static org.dspace.content.BitstreamFormat.KNOWN; import static org.dspace.content.BitstreamFormat.SUPPORTED; -import static org.dspace.core.Constants.*; +import static org.dspace.core.Constants.DEFAULT_BITSTREAM_READ; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.WRITE; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.Matchers.equalTo; @@ -55,7 +57,6 @@ import org.apache.pdfbox.pdmodel.PDDocument; import org.apache.pdfbox.text.PDFTextStripper; import org.apache.solr.client.solrj.SolrServerException; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; -import org.dspace.authorize.ResourcePolicy; import org.dspace.authorize.service.AuthorizeService; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.BitstreamBuilder; From b69517da7444d23df76a54873a55b4936d15e2e3 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 17:47:56 +1200 Subject: [PATCH 067/125] #8783 don't inherit DEFAULT_BITSTREAM_READ if item has current embargo --- .../org/dspace/content/BundleServiceImpl.java | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 01bee702bb..3fb78ee6c8 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -7,13 +7,12 @@ */ package org.dspace.content; -import static org.dspace.core.Constants.ADD; -import static org.dspace.core.Constants.REMOVE; -import static 
org.dspace.core.Constants.WRITE; +import static org.dspace.core.Constants.*; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; +import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -174,7 +173,18 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... authorizeService.inheritPolicies(context, bundle, bitstream); - if (owningItem != null) { + // The next logic is a bit overly cautious but ensures that if there are any future start dates + // on the item or bitstream read policies, that we'll skip inheriting anything from the owning collection + // just in case. In practice, the item install process would overwrite these anyway but it may satisfy + // some other bitstream creation methods and integration tests + boolean isEmbargoed = false; + for (ResourcePolicy resourcePolicy : authorizeService.getPoliciesActionFilter(context, owningItem, READ)) { + if (!resourcePolicyService.isDateValid(resourcePolicy)) { + isEmbargoed = true; + break; + } + } + if (owningItem != null && !isEmbargoed) { // Resolve owning collection Collection owningCollection = owningItem.getOwningCollection(); if (owningCollection != null) { From 694339993840a4378207c7846bf78f69fe7d318a Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Fri, 12 May 2023 18:03:02 +1200 Subject: [PATCH 068/125] #8783 checkstyle fixes --- .../org/dspace/content/BundleServiceImpl.java | 66 ++++++++++--------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java index 3fb78ee6c8..20c43e4bfc 100644 --- a/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/BundleServiceImpl.java @@ -7,12 +7,14 @@ */ package org.dspace.content; -import static org.dspace.core.Constants.*; +import static org.dspace.core.Constants.ADD; +import static org.dspace.core.Constants.READ; +import static org.dspace.core.Constants.REMOVE; +import static org.dspace.core.Constants.WRITE; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; -import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; @@ -74,14 +76,14 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (bundle == null) { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "not_found,bundle_id=" + id)); + "not_found,bundle_id=" + id)); } return null; } else { if (log.isDebugEnabled()) { log.debug(LogHelper.getHeader(context, "find_bundle", - "bundle_id=" + id)); + "bundle_id=" + id)); } return bundle; @@ -106,7 +108,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement log.info(LogHelper.getHeader(context, "create_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); // if we ever use the identifier service for bundles, we should // create the bundle before we create the Event and should add all @@ -132,12 +134,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void addBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { // Check authorisation authorizeService.authorizeAction(context, 
bundle, Constants.ADD); log.info(LogHelper.getHeader(context, "add_bitstream", "bundle_id=" - + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + + bundle.getID() + ",bitstream_id=" + bitstream.getID())); // First check that the bitstream isn't already in the list List bitstreams = bundle.getBitstreams(); @@ -167,8 +169,8 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement context.addEvent(new Event(Event.ADD, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); // copy authorization policies from bundle to bitstream // FIXME: multiple inclusion is affected by this... @@ -211,17 +213,17 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void removeBitstream(Context context, Bundle bundle, Bitstream bitstream) - throws AuthorizeException, SQLException, IOException { + throws AuthorizeException, SQLException, IOException { // Check authorisation authorizeService.authorizeAction(context, bundle, Constants.REMOVE); log.info(LogHelper.getHeader(context, "remove_bitstream", - "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); + "bundle_id=" + bundle.getID() + ",bitstream_id=" + bitstream.getID())); context.addEvent(new Event(Event.REMOVE, Constants.BUNDLE, bundle.getID(), - Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), - getIdentifiers(context, bundle))); + Constants.BITSTREAM, bitstream.getID(), String.valueOf(bitstream.getSequenceID()), + getIdentifiers(context, bundle))); //Ensure that the last modified from the item is triggered ! Item owningItem = (Item) getParentObject(context, bundle); @@ -254,9 +256,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void inheritCollectionDefaultPolicies(Context context, Bundle bundle, Collection collection) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List policies = authorizeService.getPoliciesActionFilter(context, collection, - Constants.DEFAULT_BITSTREAM_READ); + Constants.DEFAULT_BITSTREAM_READ); // change the action to just READ // just don't call update on the resourcepolicies!!! 
@@ -264,7 +266,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (!i.hasNext()) { throw new java.sql.SQLException("Collection " + collection.getID() - + " has no default bitstream READ policies"); + + " has no default bitstream READ policies"); } List newPolicies = new ArrayList(); @@ -279,7 +281,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void replaceAllBitstreamPolicies(Context context, Bundle bundle, List newpolicies) - throws SQLException, AuthorizeException { + throws SQLException, AuthorizeException { List bitstreams = bundle.getBitstreams(); if (CollectionUtils.isNotEmpty(bitstreams)) { for (Bitstream bs : bitstreams) { @@ -401,16 +403,16 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (bitstream == null) { //This should never occur but just in case log.warn(LogHelper.getHeader(context, "Invalid bitstream id while changing bitstream order", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); continue; } // If we have a Bitstream not in the current list, log a warning & exit immediately if (!currentBitstreams.contains(bitstream)) { log.warn(LogHelper.getHeader(context, - "Encountered a bitstream not in this bundle while changing bitstream " + - "order. Bitstream order will not be changed.", - "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); + "Encountered a bitstream not in this bundle while changing bitstream " + + "order. Bitstream order will not be changed.", + "Bundle: " + bundle.getID() + ", bitstream id: " + bitstreamId)); return; } updatedBitstreams.add(bitstream); @@ -419,9 +421,9 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // If our lists are different sizes, exit immediately if (updatedBitstreams.size() != currentBitstreams.size()) { log.warn(LogHelper.getHeader(context, - "Size of old list and new list do not match. Bitstream order will not be " + - "changed.", - "Bundle: " + bundle.getID())); + "Size of old list and new list do not match. 
Bitstream order will not be " + + "changed.", + "Bundle: " + bundle.getID())); return; } @@ -467,7 +469,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement } else if (AuthorizeConfiguration.canCollectionAdminPerformBitstreamDeletion()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamDeletion()) { + .canCommunityAdminPerformBitstreamDeletion()) { adminObject = community; } break; @@ -475,10 +477,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (AuthorizeConfiguration.canItemAdminPerformBitstreamCreation()) { adminObject = item; } else if (AuthorizeConfiguration - .canCollectionAdminPerformBitstreamCreation()) { + .canCollectionAdminPerformBitstreamCreation()) { adminObject = collection; } else if (AuthorizeConfiguration - .canCommunityAdminPerformBitstreamCreation()) { + .canCommunityAdminPerformBitstreamCreation()) { adminObject = community; } break; @@ -510,7 +512,7 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement // Check authorisation //AuthorizeManager.authorizeAction(ourContext, this, Constants.WRITE); log.info(LogHelper.getHeader(context, "update_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); super.update(context, bundle); bundleDAO.save(context, bundle); @@ -518,10 +520,10 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement if (bundle.isModified() || bundle.isMetadataModified()) { if (bundle.isMetadataModified()) { context.addEvent(new Event(Event.MODIFY_METADATA, bundle.getType(), bundle.getID(), bundle.getDetails(), - getIdentifiers(context, bundle))); + getIdentifiers(context, bundle))); } context.addEvent(new Event(Event.MODIFY, Constants.BUNDLE, bundle.getID(), - null, getIdentifiers(context, bundle))); + null, getIdentifiers(context, bundle))); bundle.clearModified(); bundle.clearDetails(); } @@ -530,12 +532,12 @@ public class BundleServiceImpl extends DSpaceObjectServiceImpl implement @Override public void delete(Context context, Bundle bundle) throws SQLException, AuthorizeException, IOException { log.info(LogHelper.getHeader(context, "delete_bundle", "bundle_id=" - + bundle.getID())); + + bundle.getID())); authorizeService.authorizeAction(context, bundle, Constants.DELETE); context.addEvent(new Event(Event.DELETE, Constants.BUNDLE, bundle.getID(), - bundle.getName(), getIdentifiers(context, bundle))); + bundle.getName(), getIdentifiers(context, bundle))); // Remove bitstreams List bitstreams = bundle.getBitstreams(); From b24f121c767c1803882f1ae03d91d49e5a751eed Mon Sep 17 00:00:00 2001 From: Jens Vannerum Date: Fri, 12 May 2023 10:11:09 +0200 Subject: [PATCH 069/125] 94299: checkstyle issue after main merge --- .../java/org/dspace/app/rest/BitstreamRestRepositoryIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java index 0b65f3e4b9..2a1044c28a 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/BitstreamRestRepositoryIT.java @@ -37,10 +37,10 @@ import org.dspace.app.rest.matcher.BitstreamFormatMatcher; import org.dspace.app.rest.matcher.BitstreamMatcher; import org.dspace.app.rest.matcher.BundleMatcher; import org.dspace.app.rest.matcher.HalMatcher; +import org.dspace.app.rest.matcher.MetadataMatcher; 
import org.dspace.app.rest.model.patch.Operation; import org.dspace.app.rest.model.patch.RemoveOperation; import org.dspace.app.rest.model.patch.ReplaceOperation; -import org.dspace.app.rest.matcher.MetadataMatcher; import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.app.rest.test.MetadataPatchSuite; import org.dspace.authorize.service.ResourcePolicyService; From f844ca347b90feba504b10b6a1f15e05c0b81879 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Fri, 12 May 2023 13:24:28 +0100 Subject: [PATCH 070/125] Improved fix: override missing register method in VersionedDOIIdentifierProvider --- .../identifier/VersionedDOIIdentifierProvider.java | 9 +++++++++ .../app/rest/repository/IdentifierRestRepository.java | 3 +-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index a933e85d30..4374a2549e 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -153,6 +153,15 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { register(context, dso, identifier, this.filter); } + @Override + public String register(Context context, DSpaceObject dso, Filter filter) + throws IdentifierException { + String doi = mint(context, dso, filter); + register(context, dso, doi, filter); + + return doi; + } + @Override public void register(Context context, DSpaceObject dso, String identifier, Filter filter) throws IdentifierException { diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java index b4a9688942..1be569d18e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/IdentifierRestRepository.java @@ -234,8 +234,7 @@ public class IdentifierRestRepository extends DSpaceRestRepository Date: Fri, 12 May 2023 10:53:59 -0500 Subject: [PATCH 071/125] Remove setting to wait on two codecov builds --- .codecov.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index a628d33cbe..326dd3e0b2 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,13 +4,6 @@ # Can be validated via instructions at: # https://docs.codecov.io/docs/codecov-yaml#validate-your-repository-yaml -# Tell Codecov not to send a coverage notification until (at least) 2 builds are completed -# Since we run Unit & Integration tests in parallel, this lets Codecov know that coverage -# needs to be merged across those builds -codecov: - notify: - after_n_builds: 2 - # Settings related to code coverage analysis coverage: status: From 95af5fff410b92ef9224baed50c6900e7ae88754 Mon Sep 17 00:00:00 2001 From: Agustina Martinez Date: Mon, 15 May 2023 08:40:57 +0100 Subject: [PATCH 072/125] VersionedDOIIdentifierProvider: add non-fatal check that dso is of type Item --- .../dspace/identifier/VersionedDOIIdentifierProvider.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 4374a2549e..4ca186eaab 100644 --- 
a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -156,7 +156,13 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Override public String register(Context context, DSpaceObject dso, Filter filter) throws IdentifierException { + if (!(dso instanceof Item)) { + // DOIs are currently assigned only to Items + return null; + } + String doi = mint(context, dso, filter); + register(context, dso, doi, filter); return doi; From 2f1d52bf3fbb40a7c02447a743a9843fe5ca927b Mon Sep 17 00:00:00 2001 From: Sascha Szott Date: Tue, 16 May 2023 09:09:17 +0200 Subject: [PATCH 073/125] removed eperson.subscription.onlynew --- dspace/config/dspace.cfg | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 65b1f951fa..cb69f9841f 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -893,10 +893,6 @@ org.dspace.app.batchitemimport.work.dir = ${dspace.dir}/imports # default = false, (disabled) #org.dspace.content.Collection.findAuthorizedPerformanceOptimize = true -# For backwards compatibility, the subscription emails by default include any modified items -# uncomment the following entry for only new items to be emailed -# eperson.subscription.onlynew = true - # Identifier providers. # Following are configuration values for the EZID DOI provider, with appropriate From 84ac53c7c2839f901d73247d33de63e67e32258c Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Tue, 16 May 2023 13:29:45 +0200 Subject: [PATCH 074/125] Security issue fixed: request item token exposed --- .../repository/RequestItemRepository.java | 4 +- .../app/rest/RequestItemRepositoryIT.java | 61 +++---------------- 2 files changed, 12 insertions(+), 53 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 198d0f765f..6fe3452a7f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -207,8 +207,8 @@ public class RequestItemRepository } catch (IOException | SQLException ex) { throw new RuntimeException("Request not sent.", ex); } - - return requestItemConverter.convert(ri, Projection.DEFAULT); + // #8636 - Security issue: Should not return RequestItemRest to avoid token exposure + return null; } // NOTICE: there is no service method for this -- requests are never deleted? diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 4a90efb2c1..a9b2aaf3f2 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -221,32 +221,11 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -273,31 +252,11 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. 
- RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } /** From 5f58e587f321989e277f2a37591cd9e939a9b5a2 Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Tue, 16 May 2023 13:29:45 +0200 Subject: [PATCH 075/125] Security issue fixed: request item token exposed --- .../repository/RequestItemRepository.java | 4 +- .../app/rest/RequestItemRepositoryIT.java | 64 +++---------------- 2 files changed, 12 insertions(+), 56 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java index 198d0f765f..6fe3452a7f 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/RequestItemRepository.java @@ -207,8 +207,8 @@ public class RequestItemRepository } catch (IOException | SQLException ex) { throw new RuntimeException("Request not sent.", ex); } - - return requestItemConverter.convert(ri, Projection.DEFAULT); + // #8636 - Security issue: Should not return RequestItemRest to avoid token exposure + return null; } // NOTICE: there is no service method for this -- requests are never deleted? diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 4a90efb2c1..2000b6e47e 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -8,13 +8,10 @@ package org.dspace.app.rest; import static com.jayway.jsonpath.JsonPath.read; -import static com.jayway.jsonpath.matchers.JsonPathMatchers.hasJsonPath; import static org.exparity.hamcrest.date.DateMatchers.within; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.not; -import static org.hamcrest.text.IsEmptyString.emptyOrNullString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; @@ -221,32 +218,11 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(eperson.getEmail())), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(eperson.getFullName())), - hasJsonPath("$.allfiles", is(true)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. - RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); } /** @@ -273,31 +249,11 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); - AtomicReference requestTokenRef = new AtomicReference<>(); - try { - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()) - .andExpect(content().contentType(contentType)) - .andExpect(jsonPath("$", Matchers.allOf( - hasJsonPath("$.id", not(is(emptyOrNullString()))), - hasJsonPath("$.type", is(RequestItemRest.NAME)), - hasJsonPath("$.token", not(is(emptyOrNullString()))), - hasJsonPath("$.requestEmail", is(RequestItemBuilder.REQ_EMAIL)), - hasJsonPath("$.requestMessage", is(RequestItemBuilder.REQ_MESSAGE)), - hasJsonPath("$.requestName", is(RequestItemBuilder.REQ_NAME)), - hasJsonPath("$.allfiles", is(false)), - // TODO should be an ISO datetime - hasJsonPath("$.requestDate", not(is(emptyOrNullString()))), - hasJsonPath("$._links.self.href", not(is(emptyOrNullString()))) - ))) - .andDo((var result) -> requestTokenRef.set( - read(result.getResponse().getContentAsString(), "token"))); - } finally { - // Clean up the created request. 
- RequestItemBuilder.deleteRequestItem(requestTokenRef.get()); - } + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()); + } /** From 567e989202aeb55393892ca402ab26b85324f6da Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Wed, 17 May 2023 19:11:31 +0200 Subject: [PATCH 076/125] 8636 Request a Copy - integration test increased --- .../app/rest/RequestItemRepositoryIT.java | 68 ++++++++++++++++--- 1 file changed, 57 insertions(+), 11 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 2000b6e47e..937d35c091 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -13,6 +13,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.springframework.security.test.web.servlet.request.SecurityMockMvcRequestPostProcessors.csrf; import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete; @@ -31,6 +32,7 @@ import java.io.InputStream; import java.sql.SQLException; import java.time.temporal.ChronoUnit; import java.util.Date; +import java.util.Iterator; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicReference; @@ -218,12 +220,34 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. ObjectMapper mapper = new ObjectMapper(); String authToken = getAuthToken(eperson.getEmail(), password); - getClient(authToken) - .perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()); - } + try { + getClient(authToken) + .perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); + } finally { + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(eperson.getEmail())) { + // Verify request data + assertEquals(eperson.getFullName(), requestItem.getReqName()); + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(true, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); + } +} /** * Test of createAndReturn method, with an UNauthenticated user. @@ -249,11 +273,33 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); - getClient().perform(post(URI_ROOT) - .content(mapper.writeValueAsBytes(rir)) - .contentType(contentType)) - .andExpect(status().isCreated()); - + try{ + getClient().perform(post(URI_ROOT) + .content(mapper.writeValueAsBytes(rir)) + .contentType(contentType)) + .andExpect(status().isCreated()) + // verify the body is empty + .andExpect(jsonPath("$").doesNotExist()); + }finally{ + Iterator itemRequests = requestItemService.findByItem(context, item); + String token = null; + for (Iterator it = itemRequests; it.hasNext();) { + RequestItem requestItem = it.next(); + // Find the created request via the eperson email + if (requestItem.getReqEmail().equals(RequestItemBuilder.REQ_EMAIL)) { + // Verify request data + assertEquals(item.getID(), requestItem.getItem().getID()); + assertEquals(RequestItemBuilder.REQ_MESSAGE, requestItem.getReqMessage()); + assertEquals(RequestItemBuilder.REQ_NAME, requestItem.getReqName()); + assertEquals(bitstream.getID(), requestItem.getBitstream().getID()); + assertEquals(false, requestItem.isAllfiles()); + assertNotNull(requestItem.getToken()); + token = requestItem.getToken(); + } + } + // Cleanup created request + RequestItemBuilder.deleteRequestItem(token); + } } /** From 9784c6f50dc984f6769f3774066bcd4be5eba53c Mon Sep 17 00:00:00 2001 From: aroman-arvo Date: Wed, 17 May 2023 19:26:50 +0200 Subject: [PATCH 077/125] 8636 - checkstyle --- .../java/org/dspace/app/rest/RequestItemRepositoryIT.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java index 937d35c091..2fb7dbbc96 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/RequestItemRepositoryIT.java @@ -273,14 +273,14 @@ public class RequestItemRepositoryIT // Create it and see if it was created correctly. 
ObjectMapper mapper = new ObjectMapper(); - try{ + try { getClient().perform(post(URI_ROOT) .content(mapper.writeValueAsBytes(rir)) .contentType(contentType)) .andExpect(status().isCreated()) // verify the body is empty .andExpect(jsonPath("$").doesNotExist()); - }finally{ + } finally { Iterator itemRequests = requestItemService.findByItem(context, item); String token = null; for (Iterator it = itemRequests; it.hasNext();) { From 564f9fdcf6d18514cb416773faf9596d63839e2a Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Fri, 19 May 2023 16:43:05 +0300 Subject: [PATCH 078/125] pom.xml: bump a handful of dependencies All minor and patch versions with no breaking changes: - pdfbox 2.0.28 - See: https://issues.apache.org/jira/secure/ReleaseNote.jspa?projectId=12310760&version=12352284 - log4j 2.20.0 - See: https://logging.apache.org/log4j/2.x/release-notes/2.20.0.html - rome 1.19.0 - See: https://github.com/rometools/rome/releases/tag/1.19.0 - slf4j 1.7.36 - https://www.slf4j.org/news.html - solr-client 8.11.2 - https://lucene.apache.org/core/8_11_2/changes/Changes.html - hibernate 5.6.15.Final - See: https://hibernate.org/orm/releases/5.6/#whats-new - hibernate-validator 6.2.5.Final - See: https://hibernate.org/validator/documentation/migration-guide/#6-2-x - postgresql JDBC driver 42.6.0 --- pom.xml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pom.xml b/pom.xml index 7e09188027..828854c0ca 100644 --- a/pom.xml +++ b/pom.xml @@ -22,10 +22,10 @@ 5.3.27 2.7.11 5.7.8 - 5.6.5.Final - 6.0.23.Final - 42.4.3 - 8.11.1 + 5.6.15.Final + 6.2.5.Final + 42.6.0 + 8.11.2 3.4.0 2.10.0 @@ -38,10 +38,10 @@ 1.1.0 9.4.51.v20230217 - 2.17.1 - 2.0.27 - 1.18.0 - 1.7.25 + 2.20.0 + 2.0.28 + 1.19.0 + 1.7.36 2.3.0 1.70 From d7d7f7c37034f6c571c1a61d5ba9afdc71d91d0c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 19 May 2023 22:44:07 +0100 Subject: [PATCH 079/125] support for entity type for collection at input submissions --- .../app/util/SubmissionConfigReader.java | 26 ++++++++++++++++--- .../dspace/content/CollectionServiceImpl.java | 22 ++++++++++++++++ .../content/service/CollectionService.java | 15 +++++++++++ dspace/config/item-submission.dtd | 3 ++- dspace/config/item-submission.xml | 20 ++++++++++++++ 5 files changed, 82 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 2120848358..7132c1e934 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -22,6 +22,8 @@ import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; import org.dspace.content.Collection; import org.dspace.content.DSpaceObject; +import org.dspace.content.factory.ContentServiceFactory; +import org.dspace.content.service.CollectionService; import org.dspace.core.Context; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; @@ -104,6 +106,9 @@ public class SubmissionConfigReader { * always reload from scratch) */ private SubmissionConfig lastSubmissionConfig = null; + + protected static final CollectionService collectionService + = ContentServiceFactory.getInstance().getCollectionService(); /** * Load Submission Configuration from the @@ -335,17 +340,22 @@ public class SubmissionConfigReader { * by the collection handle. 
*/ private void processMap(Node e) throws SAXException { + // create a context + Context context = new Context(); + NodeList nl = e.getChildNodes(); int len = nl.getLength(); for (int i = 0; i < len; i++) { Node nd = nl.item(i); if (nd.getNodeName().equals("name-map")) { String id = getAttribute(nd, "collection-handle"); + String entityType = getAttribute(nd, "collection-entity-type"); String value = getAttribute(nd, "submission-name"); String content = getValue(nd); - if (id == null) { + if (id == null && entityType == null) { throw new SAXException( - "name-map element is missing collection-handle attribute in 'item-submission.xml'"); + "name-map element is missing collection-handle or collection-entity-type attribute " + + "in 'item-submission.xml'"); } if (value == null) { throw new SAXException( @@ -355,7 +365,17 @@ public class SubmissionConfigReader { throw new SAXException( "name-map element has content in 'item-submission.xml', it should be empty."); } - collectionToSubmissionConfig.put(id, value); + if (id != null) { + collectionToSubmissionConfig.put(id, value); + + } else { + // get all collections for this entity-type + List collections = collectionService.findAllCollectionsByEntityType( context, + entityType); + for (Collection collection : collections) { + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + } + } } // ignore any child node that isn't a "name-map" } } diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ddfd38694f..ef89009ebf 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Objects; +import java.util.Optional; import java.util.Set; import java.util.UUID; @@ -1047,4 +1048,25 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i return (int) resp.getTotalSearchResults(); } + @Override + @SuppressWarnings("rawtypes") + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException { + List collectionList = new ArrayList<>(); + + DiscoverQuery discoverQuery = new DiscoverQuery(); + discoverQuery.setMaxResults(0); + discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); + discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); + + DiscoverResult discoverResult = searchService.search(context, discoverQuery); + List solrIndexableObjects = discoverResult.getIndexableObjects(); + + for (IndexableObject solrCollection : solrIndexableObjects) { + Collection c = ((IndexableCollection) solrCollection).getIndexedObject(); + collectionList.add(c); + } + return collectionList; + } + } diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index a5b2b7d8d8..82d8b24fb7 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -455,4 +455,19 @@ public interface CollectionService public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; + + /** + * Returns a list of all collections for a specific entity type. 
+ * NOTE: for better performance, this method retrieves its results from an index (cache) + * and does not query the database directly. + * This means that results may be stale or outdated until + * https://github.com/DSpace/DSpace/issues/2853 is resolved." + * + * @param context DSpace Context + * @param entityType limit the returned collection to those related to given entity type + * @return list of collections found + * @throws SearchServiceException if search error + */ + public List findAllCollectionsByEntityType(Context context, String entityType) + throws SearchServiceException; } diff --git a/dspace/config/item-submission.dtd b/dspace/config/item-submission.dtd index 6490dac62c..dd1afa0dd0 100644 --- a/dspace/config/item-submission.dtd +++ b/dspace/config/item-submission.dtd @@ -11,7 +11,8 @@ diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index 2ab26dcf57..f937a5fd9a 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -47,6 +47,26 @@ --> + + + From 0409373b617a4f3c0b8e76c50133b14c6e016718 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 08:07:05 +0100 Subject: [PATCH 080/125] handling exceptions --- .../java/org/dspace/app/util/SubmissionConfigReader.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 7132c1e934..82ebbd0d0d 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -25,6 +25,7 @@ import org.dspace.content.DSpaceObject; import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.core.Context; +import org.dspace.discovery.SearchServiceException; import org.dspace.handle.factory.HandleServiceFactory; import org.dspace.services.factory.DSpaceServicesFactory; import org.w3c.dom.Document; @@ -157,6 +158,9 @@ public class SubmissionConfigReader { } catch (FactoryConfigurationError fe) { throw new SubmissionConfigReaderException( "Cannot create Item Submission Configuration parser", fe); + } catch (SearchServiceException se) { + throw new SubmissionConfigReaderException( + "Cannot perform a discovery search for Item Submission Configuration", se); } catch (Exception e) { throw new SubmissionConfigReaderException( "Error creating Item Submission Configuration: " + e); @@ -292,7 +296,7 @@ public class SubmissionConfigReader { * should correspond to the collection-form maps, the form definitions, and * the display/storage word pairs. */ - private void doNodes(Node n) throws SAXException, SubmissionConfigReaderException { + private void doNodes(Node n) throws SAXException, SearchServiceException, SubmissionConfigReaderException { if (n == null) { return; } @@ -339,7 +343,7 @@ public class SubmissionConfigReader { * the collection handle and item submission name, put name in hashmap keyed * by the collection handle. 
*/ - private void processMap(Node e) throws SAXException { + private void processMap(Node e) throws SAXException, SearchServiceException { // create a context Context context = new Context(); From 687b6216dfabb5e7d4069c30e7aeb2cecc73b602 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 10:41:51 +0100 Subject: [PATCH 081/125] checkstyle violations fixing --- .../java/org/dspace/app/util/SubmissionConfigReader.java | 6 +++++- .../main/java/org/dspace/content/CollectionServiceImpl.java | 1 - .../java/org/dspace/content/service/CollectionService.java | 1 - 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 82ebbd0d0d..91be9a08e6 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -107,7 +107,11 @@ public class SubmissionConfigReader { * always reload from scratch) */ private SubmissionConfig lastSubmissionConfig = null; - + + /** + * Collection Service instance, needed to interact with collection's + * stored data + */ protected static final CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService(); diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index ef89009ebf..2166a94738 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -18,7 +18,6 @@ import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.UUID; diff --git a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java index 82d8b24fb7..9ded79fada 100644 --- a/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java +++ b/dspace-api/src/main/java/org/dspace/content/service/CollectionService.java @@ -455,7 +455,6 @@ public interface CollectionService public int countCollectionsWithSubmit(String q, Context context, Community community, String entityType) throws SQLException, SearchServiceException; - /** * Returns a list of all collections for a specific entity type. 
* NOTE: for better performance, this method retrieves its results from an index (cache) From 6fa9e74d9006b260a7ca5edc40d734219b487682 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 20 May 2023 11:35:27 +0100 Subject: [PATCH 082/125] checkstyle violations fixing --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 91be9a08e6..d394c60e41 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -109,7 +109,7 @@ public class SubmissionConfigReader { private SubmissionConfig lastSubmissionConfig = null; /** - * Collection Service instance, needed to interact with collection's + * Collection Service instance, needed to interact with collection's * stored data */ protected static final CollectionService collectionService @@ -381,7 +381,7 @@ public class SubmissionConfigReader { List collections = collectionService.findAllCollectionsByEntityType( context, entityType); for (Collection collection : collections) { - collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); } } } // ignore any child node that isn't a "name-map" From 2ef268380fedec1aaba1acff291ab04e425eab84 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Sat, 20 May 2023 12:56:38 +1200 Subject: [PATCH 083/125] Unlink DOI from item on deletion even if no provider is configured --- .../java/org/dspace/content/ItemServiceImpl.java | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java index a290cb0d99..f86b6690ad 100644 --- a/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/ItemServiceImpl.java @@ -64,7 +64,9 @@ import org.dspace.eperson.service.SubscribeService; import org.dspace.event.Event; import org.dspace.harvest.HarvestedItem; import org.dspace.harvest.service.HarvestedItemService; +import org.dspace.identifier.DOI; import org.dspace.identifier.IdentifierException; +import org.dspace.identifier.service.DOIService; import org.dspace.identifier.service.IdentifierService; import org.dspace.orcid.OrcidHistory; import org.dspace.orcid.OrcidQueue; @@ -123,6 +125,8 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It @Autowired(required = true) protected IdentifierService identifierService; @Autowired(required = true) + protected DOIService doiService; + @Autowired(required = true) protected VersioningService versioningService; @Autowired(required = true) protected HarvestedItemService harvestedItemService; @@ -786,6 +790,16 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl implements It // Remove any Handle handleService.unbindHandle(context, item); + // Delete a DOI if linked to the item. + // If no DOI consumer or provider is configured, but a DOI remains linked to this item's uuid, + // hibernate will throw a foreign constraint exception. + // Here we use the DOI service directly as it is able to manage DOIs even without any configured + // consumer or provider. 
+ DOI doi = doiService.findDOIByDSpaceObject(context, item); + if (doi != null) { + doi.setDSpaceObject(null); + } + // remove version attached to the item removeVersion(context, item); From 208cac08d561de5992812fe2aaaf92929fedd4b4 Mon Sep 17 00:00:00 2001 From: Kim Shepherd Date: Sun, 21 May 2023 15:42:56 +1200 Subject: [PATCH 084/125] modifying unit tests as per CI feedback, stubbings now unnecessary --- .../src/test/java/org/dspace/content/CollectionTest.java | 3 --- dspace-api/src/test/java/org/dspace/content/ItemTest.java | 2 -- 2 files changed, 5 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java index 1548ebcae0..13d037abf8 100644 --- a/dspace-api/src/test/java/org/dspace/content/CollectionTest.java +++ b/dspace-api/src/test/java/org/dspace/content/CollectionTest.java @@ -725,9 +725,6 @@ public class CollectionTest extends AbstractDSpaceObjectTest { // Allow Item REMOVE perms doNothing().when(authorizeServiceSpy) .authorizeAction(any(Context.class), any(Item.class), eq(Constants.REMOVE)); - // Allow Item WRITE perms (Needed to remove identifiers, e.g. DOI, before Item deletion) - doNothing().when(authorizeServiceSpy) - .authorizeAction(any(Context.class), any(Item.class), eq(Constants.WRITE)); // create & add item first context.turnOffAuthorisationSystem(); diff --git a/dspace-api/src/test/java/org/dspace/content/ItemTest.java b/dspace-api/src/test/java/org/dspace/content/ItemTest.java index 15e425e23a..bae6ce9e1d 100644 --- a/dspace-api/src/test/java/org/dspace/content/ItemTest.java +++ b/dspace-api/src/test/java/org/dspace/content/ItemTest.java @@ -1189,8 +1189,6 @@ public class ItemTest extends AbstractDSpaceObjectTest { doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.REMOVE, true); // Allow Item DELETE perms doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.DELETE); - // Allow Item WRITE perms (required to first delete identifiers) - doNothing().when(authorizeServiceSpy).authorizeAction(context, item, Constants.WRITE); UUID id = item.getID(); itemService.delete(context, item); From 2b3af3a126ae9b5f523660cb544d7cd0a6192f9b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 22 May 2023 08:42:33 +0100 Subject: [PATCH 085/125] checkstyle violations fixing --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index d394c60e41..9ed539ee4f 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -663,4 +663,4 @@ public class SubmissionConfigReader { } return results; } -} +} \ No newline at end of file From 6018a2b7be85c3593e30c66bfedf1a95d9363439 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Mon, 22 May 2023 11:50:59 +0300 Subject: [PATCH 086/125] pom.xml: bump tika from 2.3.0 to 2.5.0 A handful of bug fixes, improvements to PDF parsing, and updates to dependencies. This is the highest we can go right now without hitting dependency convergence issues related to bouncycastle. 
See: https://github.com/apache/tika/blob/2.5.0/CHANGES.txt --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 828854c0ca..ad368e3180 100644 --- a/pom.xml +++ b/pom.xml @@ -42,7 +42,7 @@ 2.0.28 1.19.0 1.7.36 - 2.3.0 + 2.5.0 1.70 From 8ddf4702af8a13f0527ede69109d2de993463c72 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Mon, 22 May 2023 11:09:43 +0200 Subject: [PATCH 087/125] [DURACOM-127] check if versioning is enabled for versioned identifier providers --- .../VersionedDOIIdentifierProvider.java | 16 +++++++++++++++- .../VersionedHandleIdentifierProvider.java | 16 +++++++++++++++- ...eIdentifierProviderWithCanonicalHandles.java | 17 ++++++++++++++++- 3 files changed, 46 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java index 4ca186eaab..e5a90907c7 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedDOIIdentifierProvider.java @@ -27,13 +27,14 @@ import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; /** * @author Marsa Haoua * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ -public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { +public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implements InitializingBean { /** * log4j category */ @@ -49,6 +50,19 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider { @Autowired(required = true) protected VersionHistoryService versionHistoryService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedDOIIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public String mint(Context context, DSpaceObject dso) throws IdentifierException { return mint(context, dso, this.filter); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java index b29d47f406..4f9efd2206 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProvider.java @@ -35,6 +35,7 @@ import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -45,7 +46,7 @@ import org.springframework.stereotype.Component; * @author Pascal-Nicolas Becker (dspace at pascal dash becker dot de) */ @Component -public class 
VersionedHandleIdentifierProvider extends IdentifierProvider { +public class VersionedHandleIdentifierProvider extends IdentifierProvider implements InitializingBean { /** * log4j category */ @@ -71,6 +72,19 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider { @Autowired(required = true) protected ContentServiceFactory contentServiceFactory; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProvider.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); diff --git a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java index 0fac326ca1..78ad6b7b79 100644 --- a/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java +++ b/dspace-api/src/main/java/org/dspace/identifier/VersionedHandleIdentifierProviderWithCanonicalHandles.java @@ -30,6 +30,7 @@ import org.dspace.versioning.Version; import org.dspace.versioning.VersionHistory; import org.dspace.versioning.service.VersionHistoryService; import org.dspace.versioning.service.VersioningService; +import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @@ -39,7 +40,8 @@ import org.springframework.stereotype.Component; * @author Ben Bosman (ben at atmire dot com) */ @Component -public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider { +public class VersionedHandleIdentifierProviderWithCanonicalHandles extends IdentifierProvider + implements InitializingBean { /** * log4j category */ @@ -65,6 +67,19 @@ public class VersionedHandleIdentifierProviderWithCanonicalHandles extends Ident @Autowired(required = true) private ItemService itemService; + /** + * After all the properties are set check that the versioning is enabled + * + * @throws Exception throws an exception if this isn't the case + */ + @Override + public void afterPropertiesSet() throws Exception { + if (!configurationService.getBooleanProperty("versioning.enabled", true)) { + throw new RuntimeException("the " + VersionedHandleIdentifierProviderWithCanonicalHandles.class.getName() + + " is enabled, but the versioning is disabled."); + } + } + @Override public boolean supports(Class identifier) { return Handle.class.isAssignableFrom(identifier); From fc2589464f7e2471aff52b252c83fb4b6e7eebdf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Mon, 22 May 2023 12:57:56 +0100 Subject: [PATCH 088/125] checkstyle violations fixing and remove unnecessary max rows limit --- .../main/java/org/dspace/app/util/SubmissionConfigReader.java | 2 +- .../src/main/java/org/dspace/content/CollectionServiceImpl.java | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java index 9ed539ee4f..0f144fd69f 100644 --- 
a/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionConfigReader.java @@ -381,7 +381,7 @@ public class SubmissionConfigReader { List collections = collectionService.findAllCollectionsByEntityType( context, entityType); for (Collection collection : collections) { - collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); + collectionToSubmissionConfig.putIfAbsent(collection.getHandle(), value); } } } // ignore any child node that isn't a "name-map" diff --git a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java index 2166a94738..5b70cc4ec0 100644 --- a/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java +++ b/dspace-api/src/main/java/org/dspace/content/CollectionServiceImpl.java @@ -1054,7 +1054,6 @@ public class CollectionServiceImpl extends DSpaceObjectServiceImpl i List collectionList = new ArrayList<>(); DiscoverQuery discoverQuery = new DiscoverQuery(); - discoverQuery.setMaxResults(0); discoverQuery.setDSpaceObjectFilter(IndexableCollection.TYPE); discoverQuery.addFilterQueries("dspace.entity.type:" + entityType); From cbfed3e2183dc4fa7d9c0e8fd253b47e28364f79 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Mon, 22 May 2023 15:37:48 +0200 Subject: [PATCH 089/125] [DURACOM-148] add crossref metadata processor to manage the date issued field --- .../CrossRefDateMetadataProcessor.java | 79 +++++++++++++++++++ ...CrossRefImportMetadataSourceServiceIT.java | 4 +- .../spring/api/crossref-integration.xml | 5 +- 3 files changed, 85 insertions(+), 3 deletions(-) create mode 100644 dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java new file mode 100644 index 0000000000..c7d806cdf4 --- /dev/null +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -0,0 +1,79 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.importer.external.crossref; + +import java.text.SimpleDateFormat; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Iterator; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.dspace.importer.external.metadatamapping.contributor.JsonPathMetadataProcessor; +import org.joda.time.LocalDate; + +/** + * This class is used for CrossRef's Live-Import to extract + * issued attribute. + * Beans are configured in the crossref-integration.xml file. 
+ * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { + + private final static Logger log = LogManager.getLogger(); + + private String pathToArray; + + @Override + public Collection processMetadata(String json) { + JsonNode rootNode = convertStringJsonToJsonNode(json); + Iterator dates = rootNode.at(pathToArray).iterator(); + Collection values = new ArrayList<>(); + while (dates.hasNext()) { + JsonNode date = dates.next(); + LocalDate issuedDate = null; + SimpleDateFormat issuedDateFormat = null; + if (date.has(0) && date.has(1) && date.has(2)) { + issuedDate = new LocalDate( + date.get(0).numberValue().intValue(), + date.get(1).numberValue().intValue(), + date.get(2).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM-dd"); + } else if (date.has(0) && date.has(1)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()) + .withMonthOfYear(date.get(1).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy-MM"); + } else if (date.has(0)) { + issuedDate = new LocalDate().withYear(date.get(0).numberValue().intValue()); + issuedDateFormat = new SimpleDateFormat("yyyy"); + } + values.add(issuedDateFormat.format(issuedDate.toDate())); + } + return values; + } + + private JsonNode convertStringJsonToJsonNode(String json) { + ObjectMapper mapper = new ObjectMapper(); + JsonNode body = null; + try { + body = mapper.readTree(json); + } catch (JsonProcessingException e) { + log.error("Unable to process json response.", e); + } + return body; + } + + public void setPathToArray(String pathToArray) { + this.pathToArray = pathToArray; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 9a0d39225c..11fe58ac1d 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -146,7 +146,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt + " Medical College of Prevention of Iodine Deficiency Diseases"); MetadatumDTO author = createMetadatumDTO("dc", "contributor", "author", "L.V. Senyuk"); MetadatumDTO type = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.184"); @@ -172,7 +172,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt "Ischemic Heart Disease and Role of Nurse of Cardiology Department"); MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "K. І. 
Kozak"); MetadatumDTO type2 = createMetadatumDTO("dc", "type", null, "journal-article"); - MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016"); + MetadatumDTO date2 = createMetadatumDTO("dc", "date", "issued", "2016-05-19"); MetadatumDTO ispartof2 = createMetadatumDTO("dc", "relation", "ispartof", "Ukraïnsʹkij žurnal medicini, bìologìï ta sportu"); MetadatumDTO doi2 = createMetadatumDTO("dc", "identifier", "doi", "10.26693/jmbs01.02.105"); diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index e01b613833..5d67c17626 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -69,8 +69,11 @@ - + + + + From c3dd1f8489a37fc177769ecd50a492397ab72502 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 22 May 2023 09:23:21 -0500 Subject: [PATCH 090/125] Update to latest Spring Boot --- pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pom.xml b/pom.xml index 828854c0ca..ef6227e6ff 100644 --- a/pom.xml +++ b/pom.xml @@ -20,7 +20,7 @@ 11 5.3.27 - 2.7.11 + 2.7.12 5.7.8 5.6.15.Final 6.2.5.Final From 7ab5f3b48925449cae681c05f305dffe86f970e6 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 22:58:15 +0300 Subject: [PATCH 091/125] dspace-api: fix typo in ImageMagickThumbnailFilter --- .../org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d16243e3e3..9fa3101205 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -208,7 +208,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { if (description != null) { if (replaceRegex.matcher(description).matches()) { if (verbose) { - System.out.format("%s %s matches pattern and is replacable.%n", + System.out.format("%s %s matches pattern and is replaceable.%n", description, nsrc); } continue; From c37622cb3f094d4f1b472db2a588e096422b0b62 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:00:36 +0300 Subject: [PATCH 092/125] dspace-api: improve logging in ImageMagickThumbnailFilter Instead of logging the name of the source bitstream, we should be logging the name of the actual thumbnail bitstream that is being considered for replacement. For example, instead of this: IM Thumbnail manual.pdf matches pattern and is replaceable. ... the message should read: IM Thumbnail manual.pdf.jpg matches pattern and is replaceable. This message is already confusing enough, but this will help. 
--- .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 9fa3101205..b7c8acc6fd 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -209,14 +209,14 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { if (replaceRegex.matcher(description).matches()) { if (verbose) { System.out.format("%s %s matches pattern and is replaceable.%n", - description, nsrc); + description, n); } continue; } if (description.equals(getDescription())) { if (verbose) { System.out.format("%s %s is replaceable.%n", - getDescription(), nsrc); + getDescription(), n); } continue; } From 7dbfa9a3c5e3c85daf2e83d5cbbeb547be93376f Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:10:29 +0300 Subject: [PATCH 093/125] dspace-api: minor logging fix in ImageMagickThumbnailFilter Minor standardization to logging (unneccessary capitalization and excessive spaces). --- .../org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index b7c8acc6fd..383fc50868 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -221,7 +221,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { continue; } } - System.out.format("Custom Thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", + System.out.format("Custom thumbnail exists for %s for item %s. Thumbnail will not be generated.%n", nsrc, item.getHandle()); return false; } From a4105bdcb0a07f4d4b573a36010d39fa4a576552 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Wed, 17 May 2023 23:20:34 +0300 Subject: [PATCH 094/125] dspace-api: simplify logic in ImageMagickThumbnailFilter There is no point passing a page parameter here, with a default of 0 no less, because we will *always* use the first page of the PDF to generate the thumbnail. No other filters use this function and the page parameter is not configurable so we should just hard code it. 
--- .../dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java | 2 +- .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index 467303c3ca..e03e770f76 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,7 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter { File f2 = null; File f3 = null; try { - f2 = getImageFile(f, 0, verbose); + f2 = getImageFile(f, verbose); f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 383fc50868..d156fdbc1a 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,7 +116,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { return f2; } - public File getImageFile(File f, int page, boolean verbose) + public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); f2.deleteOnExit(); @@ -155,7 +155,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { op.define("pdf:use-cropbox=true"); } - String s = "[" + page + "]"; + String s = "[0]"; op.addImage(f.getAbsolutePath() + s); if (configurationService.getBooleanProperty(PRE + ".flatten", true)) { op.flatten(); From 7ebb5290fc6a1c65b7ffc3f653e43932d8b79f02 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 18 May 2023 09:47:06 +0300 Subject: [PATCH 095/125] dspace-api: comment ImageMagick filters Add some comments to document the functionality of the ImageMagick thumbnail filters. This will help others understand it later when we need to re-factor them. 
--- .../app/mediafilter/ImageMagickPdfThumbnailFilter.java | 2 ++ .../dspace/app/mediafilter/ImageMagickThumbnailFilter.java | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java index e03e770f76..afe1bb3d75 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickPdfThumbnailFilter.java @@ -22,7 +22,9 @@ public class ImageMagickPdfThumbnailFilter extends ImageMagickThumbnailFilter { File f2 = null; File f3 = null; try { + // Step 1: get an image from our PDF file, with PDF-specific processing options f2 = getImageFile(f, verbose); + // Step 2: use the image above to create the final resized and rotated thumbnail f3 = getThumbnailFile(f2, verbose); byte[] bytes = Files.readAllBytes(f3.toPath()); return new ByteArrayInputStream(bytes); diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index d156fdbc1a..450882ed17 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -116,6 +116,13 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { return f2; } + /** + * Return an image from a bitstream with specific processing options for + * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to + * generate an intermediate image file for use with getThumbnailFile. It + * is unfortunate that this means we convert from PDF to JPEG to JPEG, + * which incurs generation loss. + */ public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); From 5357923d281d796cb72733ff9fa3244b3ef55490 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 18 May 2023 14:18:00 +0300 Subject: [PATCH 096/125] dspace-api: avoid generation loss in ImageMagickThumbnailFilter When filtering PDF bitstreams, the ImageMagickThumbnailFilter first creates an intermediate JPEG and then a "thumbnail" JPEG. These two operations are both lossy. The ImageMagick usage guide warns against doing that: > JPEG losses (sic) information, degrading images when saved. > Use some other format for intermediate images during processing. > Only use JPEG format, for the final image, not for further processing. As our current filter architecture requires writing a temporary file we must choose one of the following lossless formats to use for the intermediate: PNG, TIFF, or MIFF. MIFF is ImageMagick's own internal format and is much faster to write than PNG. By eliminating the first lossy conversion we gain 1.1% points on the ssimulacra2 (v2.1) scoring scale of visual quality. 
See: https://imagemagick.org/Usage/formats/#jpg --- .../app/mediafilter/ImageMagickThumbnailFilter.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java index 450882ed17..408982d157 100644 --- a/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java +++ b/dspace-api/src/main/java/org/dspace/app/mediafilter/ImageMagickThumbnailFilter.java @@ -119,13 +119,14 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter { /** * Return an image from a bitstream with specific processing options for * PDFs. This is only used by ImageMagickPdfThumbnailFilter in order to - * generate an intermediate image file for use with getThumbnailFile. It - * is unfortunate that this means we convert from PDF to JPEG to JPEG, - * which incurs generation loss. + * generate an intermediate image file for use with getThumbnailFile. */ public File getImageFile(File f, boolean verbose) throws IOException, InterruptedException, IM4JavaException { - File f2 = new File(f.getParentFile(), f.getName() + ".jpg"); + // Writing an intermediate file to disk is inefficient, but since we're + // doing it anyway, we should use a lossless format. IM's internal MIFF + // is lossless like PNG and TIFF, but much faster. + File f2 = new File(f.getParentFile(), f.getName() + ".miff"); f2.deleteOnExit(); ConvertCmd cmd = new ConvertCmd(); IMOperation op = new IMOperation(); From 05f1714b0566f2091590c1981e4a128db03ab428 Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Mon, 10 Apr 2023 11:36:47 +0200 Subject: [PATCH 097/125] Replace occurrences of DSpace with the dspace.name variable --- dspace/config/dspace.cfg | 1 + dspace/config/emails/change_password | 12 ++++-------- dspace/config/emails/doi_maintenance_error | 4 +++- dspace/config/emails/export_error | 7 ++----- dspace/config/emails/export_success | 6 ++---- dspace/config/emails/feedback | 3 ++- dspace/config/emails/flowtask_notify | 4 ++-- dspace/config/emails/harvesting_error | 4 +++- dspace/config/emails/internal_error | 2 +- dspace/config/emails/register | 10 +++------- dspace/config/emails/registration_notify | 4 +++- dspace/config/emails/request_item.admin | 8 +++++--- dspace/config/emails/request_item.author | 8 +++++--- dspace/config/emails/submit_archive | 6 +++--- dspace/config/emails/submit_reject | 7 +++---- dspace/config/emails/submit_task | 6 +++--- dspace/config/emails/subscription | 4 ++-- dspace/config/emails/welcome | 5 ++--- 18 files changed, 49 insertions(+), 52 deletions(-) diff --git a/dspace/config/dspace.cfg b/dspace/config/dspace.cfg index 8e532310c1..11c53cb6a7 100644 --- a/dspace/config/dspace.cfg +++ b/dspace/config/dspace.cfg @@ -38,6 +38,7 @@ dspace.ui.url = http://localhost:4000 # Name of the site dspace.name = DSpace at My University +dspace.shortname = DSpace # Assetstore configurations have moved to config/modules/assetstore.cfg # and config/spring/api/bitstore.xml. diff --git a/dspace/config/emails/change_password b/dspace/config/emails/change_password index eb114feeeb..908e494596 100644 --- a/dspace/config/emails/change_password +++ b/dspace/config/emails/change_password @@ -4,19 +4,15 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'Change Password Request') +#set($subject = "${config.get('dspace.name')}: Change Password Request") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To change the password for your DSpace account, please click the link -below: +To change the password for your ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/doi_maintenance_error b/dspace/config/emails/doi_maintenance_error index 5424432f64..a86de91546 100644 --- a/dspace/config/emails/doi_maintenance_error +++ b/dspace/config/emails/doi_maintenance_error @@ -10,9 +10,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = "DSpace: Error ${params[0]} DOI ${params[3]}") +#set($subject = "${config.get('dspace.name')}: Error ${params[0]} DOI ${params[3]}") Date: ${params[1]} ${params[0]} DOI ${params[4]} for ${params[2]} with ID ${params[3]} failed: ${params[5]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_error b/dspace/config/emails/export_error index 79468c281e..5223f64e33 100644 --- a/dspace/config/emails/export_error +++ b/dspace/config/emails/export_error @@ -6,14 +6,11 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - The item export you requested was not completed.') +#set($subject = "${config.get('dspace.name')}: The item export you requested was not completed.") The item export you requested was not completed, due to the following reason: ${params[0]} For more information you may contact your system administrator: ${params[1]} - - -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/export_success b/dspace/config/emails/export_success index b97a379873..211e40dd78 100644 --- a/dspace/config/emails/export_success +++ b/dspace/config/emails/export_success @@ -5,7 +5,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace - Item export requested is ready for download') +#set($subject = "${config.get('dspace.name')}: Item export requested is ready for download") The item export you requested from the repository is now ready for download. You may download the compressed file using the following link: @@ -13,6 +13,4 @@ ${params[0]} This file will remain available for at least ${params[1]} hours. - -The DSpace Team - +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/feedback b/dspace/config/emails/feedback index 7998367c26..5bf83eda76 100644 --- a/dspace/config/emails/feedback +++ b/dspace/config/emails/feedback @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'Feedback Form Information') +#set($subject = "${config.get('dspace.name')}: Feedback Form Information") Comments: @@ -24,3 +24,4 @@ Referring Page: ${params[3]} User Agent: ${params[4]} Session: ${params[5]} +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/flowtask_notify b/dspace/config/emails/flowtask_notify index 7e5286e307..f277b7f2e7 100644 --- a/dspace/config/emails/flowtask_notify +++ b/dspace/config/emails/flowtask_notify @@ -7,7 +7,7 @@ ## {4} Task result ## {5} Workflow action taken ## -#set($subject = 'DSpace: Curation Task Report') +#set($subject = "${config.get('dspace.name')}: Curation Task Report") Title: ${params[0]} Collection: ${params[1]} @@ -20,4 +20,4 @@ ${params[4]} Action taken on the submission: ${params[5]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/harvesting_error b/dspace/config/emails/harvesting_error index d14b51fe82..40e4fa58e8 100644 --- a/dspace/config/emails/harvesting_error +++ b/dspace/config/emails/harvesting_error @@ -8,7 +8,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace: Harvesting Error') +#set($subject = "${config.get('dspace.name')}: Harvesting Error") Collection ${params[0]} failed on harvest: Date: ${params[1]} @@ -18,3 +18,5 @@ ${params[3]} Exception: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/internal_error b/dspace/config/emails/internal_error index ee622f4b38..266c91b116 100644 --- a/dspace/config/emails/internal_error +++ b/dspace/config/emails/internal_error @@ -10,7 +10,7 @@ ## ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace: Internal Server Error') +#set($subject = "${config.get('dspace.name')}: Internal Server Error") An internal server error occurred on ${params[0]}: Date: ${params[1]} diff --git a/dspace/config/emails/register b/dspace/config/emails/register index 694be449a8..87b005bc99 100644 --- a/dspace/config/emails/register +++ b/dspace/config/emails/register @@ -6,17 +6,13 @@ ## #set($subject = "${config.get('dspace.name')} Account Registration") #set($phone = ${config.get('mail.message.helpdesk.telephone')}) -To complete registration for a DSpace account, please click the link -below: +To complete registration for a ${config.get('dspace.name')} account, please click the link below: ${params[0]} -If you need assistance with your account, please email - - ${config.get("mail.helpdesk")} +If you need assistance with your account, please email ${config.get("mail.helpdesk")} #if( $phone ) - or call us at ${phone}. #end -The DSpace Team +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/registration_notify b/dspace/config/emails/registration_notify index 96c87fa63d..0627d17fe0 100644 --- a/dspace/config/emails/registration_notify +++ b/dspace/config/emails/registration_notify @@ -8,10 +8,12 @@ ## ## See org.dspace.core.Email for information on the format of this file. 
## -#set($subject = 'DSpace: Registration Notification') +#set($subject = "${config.get('dspace.name')}: Registration Notification") A new user has registered on ${params[0]} at ${params[1]}: Name: ${params[2]} Email: ${params[3]} Date: ${params[4]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.admin b/dspace/config/emails/request_item.admin index c0443c60f8..ee8daa510d 100644 --- a/dspace/config/emails/request_item.admin +++ b/dspace/config/emails/request_item.admin @@ -8,11 +8,13 @@ ## {4} the approver's email address ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'Request for Open Access') +#set($subject = "${config.get('dspace.name')}: Request for Open Access") ${params[3]}, with address ${params[4]}, requested the following document/file to be in Open Access: -Document Handle:${params[1]} +Document Handle: ${params[1]} File ID: ${params[0]} -Token:${params[2]} +Token: ${params[2]} + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/request_item.author b/dspace/config/emails/request_item.author index ac79270e7f..eb9c4f38f6 100644 --- a/dspace/config/emails/request_item.author +++ b/dspace/config/emails/request_item.author @@ -11,7 +11,7 @@ ## 8 corresponding author email ## 9 configuration property "dspace.name" ## 10 configuration property "mail.helpdesk" -#set($subject = 'Request copy of document') +#set($subject = "${config.get('dspace.name')}: Request copy of document") Dear ${params[7]}, @@ -21,10 +21,12 @@ This request came along with the following message: "${params[5]}" -To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it''s in your best interest to respond. +To answer, click ${params[6]}. Whether you choose to grant or deny the request, we think that it's in your best interest to respond. -IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author''s behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. +IF YOU ARE NOT AN AUTHOR OF THIS DOCUMENT, and only submitted the document on the author's behalf, PLEASE REDIRECT THIS MESSAGE TO THE AUTHOR(S). Only the author(s) should answer the request to send a copy. IF YOU ARE AN AUTHOR OF THE REQUESTED DOCUMENT, thank you for your cooperation! If you have any questions concerning this request, please contact ${params[10]}. + +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_archive b/dspace/config/emails/submit_archive index d3d62f7f4d..ea1c31a755 100644 --- a/dspace/config/emails/submit_archive +++ b/dspace/config/emails/submit_archive @@ -4,13 +4,13 @@ ## {1} Name of collection ## {2} handle ## -#set($subject = 'DSpace: Submission Approved and Archived') +#set($subject = "${config.get('dspace.name')}: Submission Approved and Archived") You submitted: ${params[0]} To collection: ${params[1]} -Your submission has been accepted and archived in DSpace, +Your submission has been accepted and archived in ${config.get('dspace.name')}, and it has been assigned the following identifier: ${params[2]} @@ -18,4 +18,4 @@ Please use this identifier when citing your submission. Many thanks! 
-DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_reject b/dspace/config/emails/submit_reject index 44e6cf2cd9..f5376cb3a0 100644 --- a/dspace/config/emails/submit_reject +++ b/dspace/config/emails/submit_reject @@ -6,7 +6,7 @@ ## {3} Reason for the rejection ## {4} Link to 'My DSpace' page ## -#set($subject = 'DSpace: Submission Rejected') +#set($subject = "${config.get('dspace.name')}: Submission Rejected") You submitted: ${params[0]} @@ -17,7 +17,6 @@ with the following explanation: ${params[3]} -Your submission has not been deleted. You can access it from your -"My DSpace" page: ${params[4]} +Your submission has not been deleted. You can access it from your "My${config.get('dspace.shortname')}" page: ${params[4]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/submit_task b/dspace/config/emails/submit_task index 8c8b4a7e72..f68bac80b1 100644 --- a/dspace/config/emails/submit_task +++ b/dspace/config/emails/submit_task @@ -6,7 +6,7 @@ ## {3} Description of task ## {4} link to 'my DSpace' page ## -#set($subject = 'DSpace: You have a new task') +#set($subject = "${config.get('dspace.name')}: You have a new task") A new item has been submitted: @@ -16,9 +16,9 @@ Submitted by: ${params[2]} ${params[3]} -To claim this task, please visit your "My DSpace" +To claim this task, please visit your "My${config.get('dspace.shortname')}" page: ${params[4]} Many thanks! -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/subscription b/dspace/config/emails/subscription index 2879e57907..5141192b57 100644 --- a/dspace/config/emails/subscription +++ b/dspace/config/emails/subscription @@ -4,9 +4,9 @@ ## Parameters: {0} is the details of the new collections and items ## See org.dspace.core.Email for information on the format of this file. ## -#set($subject = 'DSpace Subscription') +#set($subject = "${config.get('dspace.name')} Subscription") New items are available in the collections you have subscribed to: ${params[0]} -DSpace +The ${config.get('dspace.name')} Team diff --git a/dspace/config/emails/welcome b/dspace/config/emails/welcome index febc082e07..1c22cb05e8 100644 --- a/dspace/config/emails/welcome +++ b/dspace/config/emails/welcome @@ -3,13 +3,12 @@ ## See org.dspace.core.Email for information on the format of this file. ## #set($subject = "Welcome new registered ${config.get('dspace.name')} user!") -Thank you for registering an account. Your new account can be used immediately +Thank you for registering an account. Your new account can be used immediately to subscribe to notices of new content arriving in collections of your choice. Your new account can also be granted privileges to submit new content, or to edit and/or approve submissions. -If you need assistance with your account, please email -${config.get("mail.admin")}. +If you need assistance with your account, please email ${config.get("mail.helpdesk")}. The ${config.get('dspace.name')} Team From 9bc7edb73382a38c94a765551a0ccd4cb6a08458 Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 18 May 2023 13:59:12 +0700 Subject: [PATCH 098/125] Fix: Collection's admin cannot edit its template item. 
--- .../app/rest/ItemTemplateRestController.java | 4 +- ...lateItemRestPermissionEvaluatorPlugin.java | 83 +++++++++++++++++++ 2 files changed, 85 insertions(+), 2 deletions(-) create mode 100644 dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java index e297dab44c..a6dbf3496e 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ItemTemplateRestController.java @@ -120,7 +120,7 @@ public class ItemTemplateRestController { * @throws SQLException * @throws AuthorizeException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'WRITE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'WRITE')") @RequestMapping(method = RequestMethod.PATCH) public ResponseEntity> patch(HttpServletRequest request, @PathVariable UUID uuid, @RequestBody(required = true) JsonNode jsonNode) @@ -153,7 +153,7 @@ public class ItemTemplateRestController { * @throws AuthorizeException * @throws IOException */ - @PreAuthorize("hasPermission(#uuid, 'ITEM', 'DELETE')") + @PreAuthorize("hasPermission(#uuid, 'ITEMTEMPLATE', 'DELETE')") @RequestMapping(method = RequestMethod.DELETE) public ResponseEntity> deleteTemplateItem(HttpServletRequest request, @PathVariable UUID uuid) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java new file mode 100644 index 0000000000..cb977dff3a --- /dev/null +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/security/TemplateItemRestPermissionEvaluatorPlugin.java @@ -0,0 +1,83 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest.security; + +import java.io.Serializable; +import java.sql.SQLException; +import java.util.UUID; + +import org.apache.commons.lang3.StringUtils; +import org.dspace.app.rest.model.TemplateItemRest; +import org.dspace.app.rest.utils.ContextUtil; +import org.dspace.authorize.service.AuthorizeService; +import org.dspace.content.Collection; +import org.dspace.content.service.ItemService; +import org.dspace.core.Context; +import org.dspace.eperson.EPerson; +import org.dspace.services.RequestService; +import org.dspace.services.model.Request; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.security.core.Authentication; +import org.springframework.stereotype.Component; + +/** + * {@link RestObjectPermissionEvaluatorPlugin} class that evaluate WRITE and DELETE permission over a TemplateItem + * + * @author Bui Thai Hai (thaihai.bui@dlcorp.com.vn) + */ +@Component +public class TemplateItemRestPermissionEvaluatorPlugin extends RestObjectPermissionEvaluatorPlugin { + + private static final Logger log = LoggerFactory.getLogger(TemplateItemRestPermissionEvaluatorPlugin.class); + + @Autowired + private RequestService requestService; + + @Autowired + ItemService its; + + @Autowired + private AuthorizeService authorizeService; + + @Override + 
public boolean hasDSpacePermission(Authentication authentication, Serializable targetId, String targetType, + DSpaceRestPermission permission) { + + DSpaceRestPermission restPermission = DSpaceRestPermission.convert(permission); + if (!DSpaceRestPermission.WRITE.equals(restPermission) && + !DSpaceRestPermission.DELETE.equals(restPermission)) { + return false; + } + if (!StringUtils.equalsIgnoreCase(targetType, TemplateItemRest.NAME)) { + return false; + } + + Request request = requestService.getCurrentRequest(); + Context context = ContextUtil.obtainContext(request.getHttpServletRequest()); + + EPerson ePerson = context.getCurrentUser(); + if (ePerson == null) { + return false; + } + // Allow collection's admin to edit/delete the template + + UUID dsoId = UUID.fromString(targetId.toString()); + requestService.getCurrentRequest().getHttpServletRequest().getRequestURL(); + try { + Collection coll = its.find(context, dsoId).getTemplateItemOf(); + if (authorizeService.isAdmin(context, coll)) { + return true; + } + } catch (SQLException e) { + log.error(e.getMessage(), e); + } + return false; + } +} From 8980b07f2b29b00256c3020a40c080d78e5a6450 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Tue, 23 May 2023 08:51:07 +0200 Subject: [PATCH 099/125] [DURACOM-148] fix typo: remove trailing whitespace --- .../external/crossref/CrossRefDateMetadataProcessor.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java index c7d806cdf4..dec0b050f3 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/crossref/CrossRefDateMetadataProcessor.java @@ -24,7 +24,7 @@ import org.joda.time.LocalDate; * This class is used for CrossRef's Live-Import to extract * issued attribute. * Beans are configured in the crossref-integration.xml file. 
- * + * * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ public class CrossRefDateMetadataProcessor implements JsonPathMetadataProcessor { From 50f808a7d003b0e185e790ca501546481e9c4d60 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Tue, 23 May 2023 08:51:27 +0100 Subject: [PATCH 100/125] removing Person test configuration --- dspace/config/item-submission.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace/config/item-submission.xml b/dspace/config/item-submission.xml index f937a5fd9a..2f20e34c6b 100644 --- a/dspace/config/item-submission.xml +++ b/dspace/config/item-submission.xml @@ -66,7 +66,7 @@ --> - + From 2f59554a5d6ab40dd6dbb1272e69d7a25bdeaac8 Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Tue, 23 May 2023 14:11:04 +0700 Subject: [PATCH 101/125] ADD: Unit Tests for fix --- .../rest/ItemTemplateRestControllerIT.java | 46 +++++++++++++++++++ 1 file changed, 46 insertions(+) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java index 55e82831f3..1fd9e81ca8 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ItemTemplateRestControllerIT.java @@ -33,6 +33,7 @@ import org.dspace.app.rest.test.AbstractControllerIntegrationTest; import org.dspace.authorize.service.ResourcePolicyService; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.ResourcePolicyBuilder; import org.dspace.content.Collection; import org.dspace.core.Constants; import org.hamcrest.Matchers; @@ -243,6 +244,35 @@ public class ItemTemplateRestControllerIT extends AbstractControllerIntegrationT ))))); } + /* Similar to patchTemplateItem(), except it is for collection admin, not repository admin + Test case was simplified, since it does not do anything else. 
+ */ + @Test + public void patchTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(patch(getTemplateItemUrlTemplate(itemId)) + .content(patchBody) + .contentType(contentType)) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + + getClient(collAdminToken).perform(get(getCollectionTemplateItemUrlTemplate(childCollection.getID().toString()))) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", Matchers.allOf( + hasJsonPath("$.type", is("itemtemplate")) + ))); + } + @Test public void patchIllegalInArchiveTemplateItem() throws Exception { setupTestTemplate(); @@ -337,6 +367,22 @@ public class ItemTemplateRestControllerIT extends AbstractControllerIntegrationT .andExpect(status().isNoContent()); } + /*Similar to deleteTemplateItem(), except it is for collection admin, not repository admin + */ + @Test + public void deleteTemplateItemAsCollectionAdmin() throws Exception { + setupTestTemplate(); + String itemId = installTestTemplate(); + + ResourcePolicyBuilder.createResourcePolicy(context).withUser(eperson) + .withAction(Constants.ADMIN) + .withDspaceObject(childCollection).build(); + String collAdminToken = getAuthToken(eperson.getEmail(), password); + + getClient(collAdminToken).perform(delete(getTemplateItemUrlTemplate(itemId))) + .andExpect(status().isNoContent()); + } + @Test public void deleteTemplateItemNoRights() throws Exception { setupTestTemplate(); From 571df9b38a735e7ed3eaae8e9132d5c9a499e661 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 24 May 2023 15:56:03 -0400 Subject: [PATCH 102/125] Add a simple test for new TimeHelpers class. --- .../java/org/dspace/util/TimeHelpersTest.java | 34 +++++++++++++++++++ 1 file changed, 34 insertions(+) create mode 100644 dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java diff --git a/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java new file mode 100644 index 0000000000..12055140a2 --- /dev/null +++ b/dspace-api/src/test/java/org/dspace/util/TimeHelpersTest.java @@ -0,0 +1,34 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.util; + +import static org.junit.Assert.assertEquals; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Date; + +import org.junit.Test; + +/** + * Test {@link TimeHelpers}. + * @author Mark H. Wood + */ +public class TimeHelpersTest { + /** + * Test of toMidnightUTC method, of class TimeHelpers. 
+ */ + @Test + public void testToMidnightUTC() { + System.out.println("toMidnightUTC"); + Date from = Date.from(ZonedDateTime.of(1957, 01, 27, 04, 05, 06, 007, ZoneOffset.UTC).toInstant()); + Date expResult = Date.from(ZonedDateTime.of(1957, 01, 27, 00, 00, 00, 000, ZoneOffset.UTC).toInstant()); + Date result = TimeHelpers.toMidnightUTC(from); + assertEquals(expResult, result); + } +} From eb46a99dff6265aaf5d3e36a21cc2b8d8b3a7b6a Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 25 May 2023 09:57:13 +0700 Subject: [PATCH 103/125] Fix: default sort option (lastModified) for discovery --- .../DiscoverySortConfiguration.java | 14 ++++ .../discovery/utils/DiscoverQueryBuilder.java | 8 ++- .../org/dspace/discovery/DiscoveryIT.java | 68 +++++++++++++++++++ .../DiscoverConfigurationConverter.java | 9 +++ .../rest/model/SearchConfigurationRest.java | 10 +++ .../app/rest/DiscoveryRestControllerIT.java | 4 +- .../utils/RestDiscoverQueryBuilderTest.java | 15 ++++ dspace/config/spring/api/discovery.xml | 6 ++ dspace/solr/search/conf/schema.xml | 1 + 9 files changed, 132 insertions(+), 3 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java index e251d1bc51..cd1a4eecb8 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/discovery/configuration/DiscoverySortConfiguration.java @@ -9,6 +9,7 @@ package org.dspace.discovery.configuration; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nullable; import org.apache.commons.collections4.CollectionUtils; import org.apache.commons.lang3.StringUtils; @@ -22,6 +23,11 @@ public class DiscoverySortConfiguration { private List sortFields = new ArrayList(); + /** + * Default sort configuration to use when needed + */ + @Nullable private DiscoverySortFieldConfiguration defaultSortField; + public List getSortFields() { return sortFields; } @@ -30,6 +36,14 @@ public class DiscoverySortConfiguration { this.sortFields = sortFields; } + public DiscoverySortFieldConfiguration getDefaultSortField() { + return defaultSortField; + } + + public void setDefaultSortField(DiscoverySortFieldConfiguration configuration) { + this.defaultSortField = configuration; + } + public DiscoverySortFieldConfiguration getSortFieldConfiguration(String sortField) { if (StringUtils.isBlank(sortField)) { return null; diff --git a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java index fa5cc32813..92a973dff8 100644 --- a/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java +++ b/dspace-api/src/main/java/org/dspace/discovery/utils/DiscoverQueryBuilder.java @@ -332,7 +332,9 @@ public class DiscoverQueryBuilder implements InitializingBean { } private String getDefaultSortDirection(DiscoverySortConfiguration searchSortConfiguration, String sortOrder) { - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortOrder = searchSortConfiguration.getDefaultSortField().getDefaultSortOrder().name(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { sortOrder = searchSortConfiguration.getSortFields().get(0).getDefaultSortOrder().name(); } @@ -342,7 
+344,9 @@ public class DiscoverQueryBuilder implements InitializingBean { private String getDefaultSortField(DiscoverySortConfiguration searchSortConfiguration) { String sortBy;// Attempt to find the default one, if none found we use SCORE sortBy = "score"; - if (Objects.nonNull(searchSortConfiguration.getSortFields()) && + if (searchSortConfiguration.getDefaultSortField() != null) { + sortBy = searchSortConfiguration.getDefaultSortField().getMetadataField(); + } else if (Objects.nonNull(searchSortConfiguration.getSortFields()) && !searchSortConfiguration.getSortFields().isEmpty()) { DiscoverySortFieldConfiguration defaultSort = searchSortConfiguration.getSortFields().get(0); if (StringUtils.isBlank(defaultSort.getMetadataField())) { diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0d1cc13106..0c3a52ec79 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -8,13 +8,16 @@ package org.dspace.discovery; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; +import java.util.LinkedList; import java.util.List; +import java.util.stream.Collectors; import javax.servlet.http.HttpServletRequest; import org.dspace.AbstractIntegrationTestWithDatabase; @@ -24,6 +27,7 @@ import org.dspace.authorize.AuthorizeException; import org.dspace.builder.ClaimedTaskBuilder; import org.dspace.builder.CollectionBuilder; import org.dspace.builder.CommunityBuilder; +import org.dspace.builder.EPersonBuilder; import org.dspace.builder.ItemBuilder; import org.dspace.builder.PoolTaskBuilder; import org.dspace.builder.WorkflowItemBuilder; @@ -39,6 +43,8 @@ import org.dspace.content.factory.ContentServiceFactory; import org.dspace.content.service.CollectionService; import org.dspace.content.service.ItemService; import org.dspace.content.service.WorkspaceItemService; +import org.dspace.discovery.configuration.DiscoveryConfiguration; +import org.dspace.discovery.configuration.DiscoverySortFieldConfiguration; import org.dspace.discovery.indexobject.IndexableClaimedTask; import org.dspace.discovery.indexobject.IndexableCollection; import org.dspace.discovery.indexobject.IndexableItem; @@ -731,6 +737,68 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { } } + /** + * Test designed to check if default sort option for Discovery is working, using workspace + * DiscoveryConfiguration
+ * Note: this test will be skipped if workspace do not have a default sort option set and of + * metadataType dc_date_accessioned or lastModified + * @throws SearchServiceException + */ + @Test + public void searchWithDefaultSortServiceTest() throws SearchServiceException { + + DiscoveryConfiguration workspaceConf = SearchUtils.getDiscoveryConfiguration("workspace", null); + // Skip if no default sort option set for workspaceConf + if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { + return; + } + + DiscoverySortFieldConfiguration defaultSortField = + workspaceConf.getSearchSortConfiguration().getDefaultSortField(); + + // Populate the testing objects: create items in eperson's workspace and perform search in it + int numberItems = 10; + context.turnOffAuthorisationSystem(); + EPerson submitter = EPersonBuilder.createEPerson(context).withEmail("submitter@example.org").build(); + context.setCurrentUser(submitter); + Community community = CommunityBuilder.createCommunity(context).build(); + Collection collection = CollectionBuilder.createCollection(context, community).build(); + for (int i = 0; i < numberItems; i++) { + ItemBuilder.createItem(context, collection) + .withTitle("item " + i) + .build(); + } + + // Build query with default parameters (except for workspaceConf) + DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() + .buildQuery(context, new IndexableCollection(collection), workspaceConf,"",null,"Item",null,null, + null,null); + + DiscoverResult result = searchService.search(context, discoverQuery); + + if (defaultSortField.getMetadataField().equals("dc_date_accessioned")) { + // Verify that search results are sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> m.getValue()).findFirst().orElse("") + ) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(dc_date_accesioneds.isEmpty()); + for (int i = 1; i < dc_date_accesioneds.size() - 1; i++) { + assertTrue(dc_date_accesioneds.get(i).compareTo(dc_date_accesioneds.get(i + 1)) >= 0); + } + } else if (defaultSortField.getMetadataField().equals("lastModified")) { + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); + } + } + } + private void assertSearchQuery(String resourceType, int size) throws SearchServiceException { assertSearchQuery(resourceType, size, size, 0, -1); } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java index 73851bd945..41cf235a87 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/DiscoverConfigurationConverter.java @@ -80,6 +80,15 @@ public class DiscoverConfigurationConverter sortOption.setSortOrder(discoverySearchSortConfiguration.getDefaultSortOrder().name()); searchConfigurationRest.addSortOption(sortOption); } + + 
DiscoverySortFieldConfiguration defaultSortField = searchSortConfiguration.getDefaultSortField(); + if (defaultSortField != null) { + SearchConfigurationRest.SortOption sortOption = new SearchConfigurationRest.SortOption(); + sortOption.setName(defaultSortField.getMetadataField()); + sortOption.setActualName(defaultSortField.getType()); + sortOption.setSortOrder(defaultSortField.getDefaultSortOrder().name()); + searchConfigurationRest.setDefaultSortOption(sortOption); + } } } diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java index 7ec1b22500..b25d827e75 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/model/SearchConfigurationRest.java @@ -31,6 +31,8 @@ public class SearchConfigurationRest extends BaseObjectRest { private List filters = new LinkedList<>(); private List sortOptions = new LinkedList<>(); + private SortOption defaultSortOption; + public String getCategory() { return CATEGORY; } @@ -75,6 +77,14 @@ public class SearchConfigurationRest extends BaseObjectRest { return sortOptions; } + public SortOption getDefaultSortOption() { + return defaultSortOption; + } + + public void setDefaultSortOption(SortOption defaultSortOption) { + this.defaultSortOption = defaultSortOption; + } + @Override public boolean equals(Object object) { return (object instanceof SearchConfigurationRest && diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index a115c8aa2f..dd0b2fe576 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -1286,8 +1286,10 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest context.restoreAuthSystemState(); + //Update this test since dc.date.accessioned is now configured for workspace configuration, + // which will return status 400 instead of 422 if left unchanged getClient().perform(get("/api/discover/search/objects") - .param("sort", "dc.date.accessioned, ASC") + .param("sort", "person.familyName, ASC") .param("configuration", "workspace")) .andExpect(status().isUnprocessableEntity()); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 6c9544d2f9..511bb8f98b 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -115,6 +115,8 @@ public class RestDiscoverQueryBuilderTest { sortConfiguration.setSortFields(listSortField); + sortConfiguration.setDefaultSortField(defaultSort); + discoveryConfiguration.setSearchSortConfiguration(sortConfiguration); DiscoverySearchFilterFacet subjectFacet = new DiscoverySearchFilterFacet(); @@ -167,6 +169,19 @@ public class RestDiscoverQueryBuilderTest { page.getOffset(), "SCORE", "ASC"); } + @Test + public void testSortByDefaultSortField() throws Exception { + page = PageRequest.of(2, 10, Sort.Direction.DESC, "dc.date.accessioned"); + restQueryBuilder.buildQuery(context, null, 
discoveryConfiguration, null, null, emptyList(), page); + + verify(discoverQueryBuilder, times(1)) + .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), + page.getPageSize(), page.getOffset(), + discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField().getMetadataField(), + discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField() + .getDefaultSortOrder().name().toUpperCase()); + } + @Test(expected = DSpaceBadRequestException.class) public void testCatchIllegalArgumentException() throws Exception { when(discoverQueryBuilder.buildQuery(any(), any(), any(), any(), any(), anyList(), any(), any(), any(), diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 611e77b27b..3f0f507451 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -865,8 +865,11 @@ + + + @@ -938,6 +941,8 @@ + + @@ -1015,6 +1020,7 @@ + diff --git a/dspace/solr/search/conf/schema.xml b/dspace/solr/search/conf/schema.xml index caa646ba1b..df21afbc64 100644 --- a/dspace/solr/search/conf/schema.xml +++ b/dspace/solr/search/conf/schema.xml @@ -283,6 +283,7 @@ + From b3a21ebd5a81701899753d83511746ab90f36cce Mon Sep 17 00:00:00 2001 From: Bui Thai Hai Date: Thu, 25 May 2023 14:54:46 +0700 Subject: [PATCH 104/125] Minor Tweaks --- .../java/org/dspace/app/rest/DiscoveryRestControllerIT.java | 4 +--- dspace/config/spring/api/discovery.xml | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java index dd0b2fe576..a115c8aa2f 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/DiscoveryRestControllerIT.java @@ -1286,10 +1286,8 @@ public class DiscoveryRestControllerIT extends AbstractControllerIntegrationTest context.restoreAuthSystemState(); - //Update this test since dc.date.accessioned is now configured for workspace configuration, - // which will return status 400 instead of 422 if left unchanged getClient().perform(get("/api/discover/search/objects") - .param("sort", "person.familyName, ASC") + .param("sort", "dc.date.accessioned, ASC") .param("configuration", "workspace")) .andExpect(status().isUnprocessableEntity()); } diff --git a/dspace/config/spring/api/discovery.xml b/dspace/config/spring/api/discovery.xml index 3f0f507451..45e5829e1a 100644 --- a/dspace/config/spring/api/discovery.xml +++ b/dspace/config/spring/api/discovery.xml @@ -866,10 +866,10 @@ - + - + From f3b939e88f63fd83f56dd1ea7f29781ce398d4fe Mon Sep 17 00:00:00 2001 From: Yana De Pauw Date: Thu, 25 May 2023 15:34:05 +0300 Subject: [PATCH 105/125] 94299: Add rest.patch.operations.limit to config file --- .../dspace/app/rest/repository/BitstreamRestRepository.java | 2 +- .../java/org/dspace/app/rest/BitstreamRestRepositoryIT.java | 4 ++-- dspace/config/modules/rest.cfg | 4 ++++ 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java index 454b6f8453..12e27dccac 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java +++ 
b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/BitstreamRestRepository.java @@ -265,7 +265,7 @@ public class BitstreamRestRepository extends DSpaceObjectRestRepository ops = new ArrayList<>(); RemoveOperation removeOp1 = new RemoveOperation(OPERATION_PATH_BITSTREAM_REMOVE + bitstream1.getID()); ops.add(removeOp1); @@ -2577,7 +2577,7 @@ public class BitstreamRestRepositoryIT extends AbstractControllerIntegrationTest String token = getAuthToken(admin.getEmail(), password); Assert.assertTrue(bitstreamExists(token, bitstream1, bitstream2, bitstream3, bitstream4)); - DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("patch.operations.limit", 2); + DSpaceServicesFactory.getInstance().getConfigurationService().setProperty("rest.patch.operations.limit", 2); getClient(token).perform(patch("/api/core/bitstreams") .content(patchBody) diff --git a/dspace/config/modules/rest.cfg b/dspace/config/modules/rest.cfg index 6421258c57..657e02b58d 100644 --- a/dspace/config/modules/rest.cfg +++ b/dspace/config/modules/rest.cfg @@ -25,6 +25,10 @@ rest.projections.full.max = 2 # This property determines the max embed depth for a SpecificLevelProjection rest.projection.specificLevel.maxEmbed = 5 +# This property determines the max amount of rest operations that can be performed at the same time, for example when +# batch removing bitstreams. The default value is set to 1000. +rest.patch.operations.limit = 1000 + # Define which configuration properties are exposed through the http:///api/config/properties/ # rest endpoint. If a rest request is made for a property which exists, but isn't listed here, the server will # respond that the property wasn't found. This property can be defined multiple times to allow access to multiple From 4fa51d03d11259cd67506247be406d5b7faa7e35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 26 May 2023 17:14:10 +0100 Subject: [PATCH 106/125] adding support for access status xoai plugin --- .../AccessStatusElementItemCompilePlugin.java | 67 ++++++++++++++++ .../oai/metadataFormats/oai_openaire.xsl | 78 +++++++++++++++++-- dspace/config/spring/oai/oai.xml | 4 + 3 files changed, 144 insertions(+), 5 deletions(-) create mode 100644 dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java new file mode 100644 index 0000000000..65ec251b21 --- /dev/null +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -0,0 +1,67 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.xoai.app.plugins; + +import java.sql.SQLException; +import java.util.List; +import org.dspace.access.status.factory.AccessStatusServiceFactory; +import org.dspace.access.status.service.AccessStatusService; +import org.dspace.content.Item; +import org.dspace.core.Context; +import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; +import org.dspace.xoai.util.ItemUtils; +import com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; + +/** + * AccessStatusElementItemCompilePlugin aims to add structured information about the + * Access 
Status of the item (if any). + + * The xoai document will be enriched with a structure like that + *
+ * {@code
+ *   <element name="others">
+ *       <field name="access-status">
+ *          open.access
+ *       </field>
+ *   </element>
+ * }
+ * 
+ * Returning Values are based on: + * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper + */ +public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { + + @Override + public Metadata additionalMetadata(Context context, Metadata metadata, Item item) { + AccessStatusService accessStatusService = AccessStatusServiceFactory.getInstance().getAccessStatusService(); + + try { + String accessStatusType; + accessStatusType = accessStatusService.getAccessStatus(context, item); + + Element accessStatus = ItemUtils.create("access-status"); + accessStatus.getField().add(ItemUtils.createValue("value", accessStatusType)); + + Element others; + List elements = metadata.getElement(); + if (ItemUtils.getElement(elements, "others") != null) { + others = ItemUtils.getElement(elements, "others"); + } else { + others = ItemUtils.create("others"); + } + others.getElement().add(accessStatus); + + } catch (SQLException e) { + e.printStackTrace(); + } + + return metadata; + } + +} diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 7b66eaf043..19b1486f4c 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -93,6 +93,9 @@ + + @@ -658,6 +661,40 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -1207,7 +1244,7 @@ - + + + + + + + + + + + open access + + + embargoed access + + + restricted access + + + metadata only access + + + + + + + + From e889abc6238b257bbde537814e011a842f4512d1 Mon Sep 17 00:00:00 2001 From: nwoodward Date: Fri, 26 May 2023 14:21:57 -0500 Subject: [PATCH 107/125] check that zip file exists and has correct MIME type; also make sure that common temp imports directory is not removed --- .../org/dspace/app/itemimport/ItemImport.java | 49 ++++++++++++++++-- .../dspace/app/itemimport/ItemImportCLI.java | 23 +++++++- .../app/itemimport/ItemImportCLIIT.java | 24 +++++++++ .../org/dspace/app/itemimport/test.pdf | Bin 0 -> 56812 bytes .../dspace/app/itemimport/ItemImportIT.java | 6 +++ 5 files changed, 95 insertions(+), 7 deletions(-) create mode 100644 dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index ac9db76051..bcf7afed38 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -23,6 +23,7 @@ import java.util.UUID; import org.apache.commons.cli.ParseException; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; +import org.apache.tika.Tika; import org.dspace.app.itemimport.factory.ItemImportServiceFactory; import org.dspace.app.itemimport.service.ItemImportService; import org.dspace.authorize.AuthorizeException; @@ -77,6 +78,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean zip = false; protected boolean remoteUrl = false; protected String zipfilename = null; + protected boolean zipvalid= false; protected boolean help = false; protected File workDir = null; protected File workFile = null; @@ -235,11 +237,19 @@ public class ItemImport extends DSpaceRunnable { handler.logInfo("***End of Test Run***"); } } finally { - // clean work dir if (zip) { - FileUtils.deleteDirectory(new File(sourcedir)); - 
FileUtils.deleteDirectory(workDir); - if (remoteUrl && workFile != null && workFile.exists()) { + // if zip file was valid then clean sourcedir + if (zipvalid && sourcedir != null && new File(sourcedir).exists()) { + FileUtils.deleteDirectory(new File(sourcedir)); + } + + // clean workdir + if (workDir != null && workDir.exists()) { + FileUtils.deleteDirectory(workDir); + } + + // conditionally clean workFile if import was done in the UI or via a URL and it still exists + if (workFile != null && workFile.exists()) { workFile.delete(); } } @@ -329,7 +339,14 @@ public class ItemImport extends DSpaceRunnable { // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } + if (optionalFileStream.isPresent()) { + // validate zip file + Optional validationFileStream = handler.getFileStream(context, zipfilename); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); @@ -337,10 +354,32 @@ public class ItemImport extends DSpaceRunnable { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); + + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } + /** + * Confirm that the zip file has the correct MIME type + * @param inputStream + */ + protected void validateZip(InputStream inputStream) { + Tika tika = new Tika(); + try { + String mimeType = tika.detect(inputStream); + if (mimeType.equals("application/zip")) { + zipvalid = true; + } else { + handler.logError("A valid zip file must be supplied. 
The provided file has mimetype: " + mimeType); + throw new UnsupportedOperationException("A valid zip file must be supplied"); + } + } catch (IOException e) { + throw new IllegalArgumentException( + "There was an error while reading the zip file: " + zipfilename); + } + } + /** * Read the mapfile * @param context diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java index 1a71a8c4c0..98d2469b71 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImportCLI.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import java.io.File; +import java.io.FileInputStream; import java.io.InputStream; import java.net.URL; import java.sql.SQLException; @@ -101,6 +102,17 @@ public class ItemImportCLI extends ItemImport { // If this is a zip archive, unzip it first if (zip) { if (!remoteUrl) { + // confirm zip file exists + File myZipFile = new File(sourcedir + File.separator + zipfilename); + if ((!myZipFile.exists()) || (!myZipFile.isFile())) { + throw new IllegalArgumentException( + "Error reading file, the file couldn't be found for filename: " + zipfilename); + } + + // validate zip file + InputStream validationFileStream = new FileInputStream(myZipFile); + validateZip(validationFileStream); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + File.separator + context.getCurrentUser().getID()); sourcedir = itemImportService.unzip( @@ -109,15 +121,22 @@ public class ItemImportCLI extends ItemImport { // manage zip via remote url Optional optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); if (optionalFileStream.isPresent()) { + // validate zip file via url + Optional validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + if (validationFileStream.isPresent()) { + validateZip(validationFileStream.get()); + } + workFile = new File(itemImportService.getTempWorkDir() + File.separator + zipfilename + "-" + context.getCurrentUser().getID()); FileUtils.copyInputStreamToFile(optionalFileStream.get(), workFile); + workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR + + File.separator + context.getCurrentUser().getID()); + sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } else { throw new IllegalArgumentException( "Error reading file, the file couldn't be found for filename: " + zipfilename); } - workDir = new File(itemImportService.getTempWorkDir() + File.separator + TEMP_DIR); - sourcedir = itemImportService.unzip(workFile, workDir.getAbsolutePath()); } } } diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index 411e8de4df..02a0a8aee0 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -8,6 +8,7 @@ package org.dspace.app.itemimport; import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import java.io.File; import java.nio.file.Files; @@ -33,6 +34,7 @@ import org.dspace.content.service.ItemService; import org.dspace.content.service.RelationshipService; import org.dspace.services.ConfigurationService; import org.dspace.services.factory.DSpaceServicesFactory; +import org.flywaydb.core.internal.util.ExceptionUtils; import 
org.junit.After; import org.junit.Before; import org.junit.Test; @@ -46,6 +48,7 @@ import org.junit.Test; public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private static final String ZIP_NAME = "saf.zip"; + private static final String PDF_NAME = "test.pdf"; private static final String publicationTitle = "A Tale of Two Cities"; private static final String personTitle = "Person Test"; @@ -55,6 +58,7 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { private Collection collection; private Path tempDir; private Path workDir; + private static final String TEMP_DIR = ItemImport.TEMP_DIR; @Before @Override @@ -226,6 +230,10 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { checkMetadata(); checkMetadataWithAnotherSchema(); checkBitstream(); + + // confirm that TEMP_DIR still exists + File workTempDir = new File(workDir + File.separator + TEMP_DIR); + assertTrue(workTempDir.exists()); } @Test @@ -254,6 +262,22 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { checkRelationship(); } + @Test + public void importItemByZipSafInvalidMimetype() throws Exception { + // use sample PDF file + Files.copy(getClass().getResourceAsStream("test.pdf"), + Path.of(tempDir.toString() + "/" + PDF_NAME)); + + String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + "/mapfile.out" }; + try { + perfomImportScript(args); + } catch (Exception e) { + // should throw an exception due to invalid mimetype + assertEquals(UnsupportedOperationException.class, ExceptionUtils.getRootCause(e).getClass()); + } + } + @Test public void resumeImportItemBySafWithMetadataOnly() throws Exception { // create simple SAF diff --git a/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf b/dspace-api/src/test/resources/org/dspace/app/itemimport/test.pdf new file mode 100644 index 0000000000000000000000000000000000000000..5b3749cbff73a41baf8aa06b7f62339886bc6ca2 GIT binary patch literal 56812 zcmdSAWmH|=wk=2?K!Ow8-GXo2-QC^UIBXn(TX1&`1b26LcMmSX-5rYL-0$9V+Iz36 ze$=0$HQJtQtuf~4W6ZH;?`sR0ys#)OBOMz&3|Za#A3O{rAp;@E&;lNYo10$wn=O!D z$iUIS3S>$zZ(s^^AOyT$R-~6UurZ|s+R&;fQ4!KBIodlJIV#%&fr=oIBOxQpUlSPv zdrJu$6VUrKe}+I~!gn(uFE70e(9yv7eNI9qj=zSg=0F#qy}Uip1ZWSmF?x@|%<$&| zFE2a{(8l;LmOoMd1JqwI^gNk{)lp2z(ZCV-eqBt7jgak6u=f!=A^V@v zyM??x$Vdt3s73!?QF>*dt0TRH^?S(#|MepDuNR3w_})ukY;GV3a@BfwWgui_1Q4=u z0CWi7^RaP!NA_NUf6ZUFwoiD2&gC~ z_&%?)fuX~@$-frlUs*a3GW>f%{!IK&K}5~19D(-qqE_!&2?LEl#z1;$ppB`c86hKp zmHCgCgQGprz#1OLHN8x2J!**^$xCd@JK3Ei;gj<-Nr$w zOT0dWYUJgH&4{99vU3X9BV>Dg;lZ>GGQq|4^joM9BcM35@rf9)^fYYJpw@aVCcpM0bz1drdfU8G6jA=#C+*NKP(v>OP83QJ{k^e zO036Hyh0Jb^606Q8xp4L6@%Pc67}EIp?wI!U3Bqyzxs0q7ars7_Skbdx1y1f7}Q+? 
[... base85-encoded binary patch data for test.pdf (GIT binary patch, literal 56812) omitted ...]
zqCv?GA4;+)#RHb{zf43n+niE0WJl!Ed?-4|N7C{-$$?8M$M4W~4u!A{UdiZn8C$QW_lgr#XJmNt>|d1}9nWs2tY zt{9kr&Gz4E@%t0Z*lfncoX@nH!(kt1wCG8aE@TM}SxD$KbdklN(NhGYu`=+>XDqZu zH(gICWe&GRg+gRzAS62jZvJ|_rPyXE9()fk)%Oqsj0PDI%cFFS(lsImMx;Etx<=_5 z5d$Mq9$j4{qbvSrA8z>JZZp|2n6Gslum{dpM_@mZwH>BWN0sPntiDiCaMr@zjUDZa zfh>i2)W$#;iV97nC@W1+c`5I~%6xS^pb2cQ0}y})u(!oZMNx(W9F$@+10E3&s1<~= z)nQ@@x{xF-dO}AQS`2g%+9;*&lg$5H0|qGa2{JEXMFQMfT6wp~@rmAxporAgE4rFF zv@7>U=eWfkz2{wnZ=8bPwBOTGICb*8PtV-aP<+?;mwvIiVqGu!v1ujO`P00QyRVE} zFy*PoKm5zDmLKR0GW`##r$Iuwr87q7G2c`FZ zFs|sgmfV|vk3aulk#6D9ZOt9R(pjP6=JVR8Zupn>E-5SVQHl5Ds9^6(!rwzLf&nbU zPz*p<5*R$MKrawTVg$=(U?ggi)AVQqla3L33eLoEJrc(=^x$?D&R&`xX~?vt=#2bG zTAyjlz}7$AAFIpsr=#IH#PGNjj%U_}6YwY{3s;$#K8v0q6C>e4M7lqc$b!M6pA0=J z3r1(bvdk};r4CY;#WqVFWR^OpEOk&>>Y%dJ0gsz9YJ=+~1B0tE4Wsn>j5(r79elV9 zn@fhE@$@-o2v(no8FcCEg<(hr9>NUw$55&MU}NqRczD0TGn)8AiTuu=MDb^hQ Date: Fri, 26 May 2023 15:01:43 -0500 Subject: [PATCH 108/125] fix checkstyle --- .../src/main/java/org/dspace/app/itemimport/ItemImport.java | 2 +- .../test/java/org/dspace/app/itemimport/ItemImportCLIIT.java | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index bcf7afed38..c94e163243 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -78,7 +78,7 @@ public class ItemImport extends DSpaceRunnable { protected boolean zip = false; protected boolean remoteUrl = false; protected String zipfilename = null; - protected boolean zipvalid= false; + protected boolean zipvalid = false; protected boolean help = false; protected File workDir = null; protected File workFile = null; diff --git a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java index 02a0a8aee0..08ae3af4ae 100644 --- a/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java +++ b/dspace-api/src/test/java/org/dspace/app/itemimport/ItemImportCLIIT.java @@ -269,7 +269,8 @@ public class ItemImportCLIIT extends AbstractIntegrationTestWithDatabase { Path.of(tempDir.toString() + "/" + PDF_NAME)); String[] args = new String[] { "import", "-a", "-e", admin.getEmail(), "-c", collection.getID().toString(), - "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + "/mapfile.out" }; + "-s", tempDir.toString(), "-z", PDF_NAME, "-m", tempDir.toString() + + "/mapfile.out" }; try { perfomImportScript(args); } catch (Exception e) { From 43ab705568b136651d9086119a11001c73cc08e9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Fri, 26 May 2023 22:47:02 +0100 Subject: [PATCH 109/125] fixing code style errors --- .../plugins/AccessStatusElementItemCompilePlugin.java | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java index 65ec251b21..a854b4b42d 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -9,18 +9,19 @@ package org.dspace.xoai.app.plugins; import java.sql.SQLException; import java.util.List; + +import 
com.lyncode.xoai.dataprovider.xml.xoai.Element; +import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; import org.dspace.access.status.factory.AccessStatusServiceFactory; import org.dspace.access.status.service.AccessStatusService; import org.dspace.content.Item; import org.dspace.core.Context; import org.dspace.xoai.app.XOAIExtensionItemCompilePlugin; import org.dspace.xoai.util.ItemUtils; -import com.lyncode.xoai.dataprovider.xml.xoai.Element; -import com.lyncode.xoai.dataprovider.xml.xoai.Metadata; /** * AccessStatusElementItemCompilePlugin aims to add structured information about the - * Access Status of the item (if any). + * Access Status of the item (if any). * The xoai document will be enriched with a structure like that *
@@ -32,7 +33,7 @@ import com.lyncode.xoai.dataprovider.xml.xoai.Metadata;
  *   ;
  * }
  * 
- * Returning Values are based on: + * Returning Values are based on: * @see org.dspace.access.status.DefaultAccessStatusHelper DefaultAccessStatusHelper */ public class AccessStatusElementItemCompilePlugin implements XOAIExtensionItemCompilePlugin { From a9eab4a254b6fa151da0a86dfdf02217afdcf965 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 27 May 2023 08:23:46 +0100 Subject: [PATCH 110/125] also add support for access status at bitstream level --- .../crosswalks/oai/metadataFormats/oai_openaire.xsl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl index 19b1486f4c..3a1d75eb56 100644 --- a/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl +++ b/dspace/config/crosswalks/oai/metadataFormats/oai_openaire.xsl @@ -75,6 +75,9 @@ + + @@ -93,9 +96,6 @@ - - @@ -1162,11 +1162,11 @@ - + + select="/doc:metadata/doc:element[@name='others']/doc:element[@name='access-status']/doc:field[@name='value']/text()"/> From c11679c6defd3d68496006fdbb3c01869ef5a3ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Paulo=20Gra=C3=A7a?= Date: Sat, 27 May 2023 09:19:15 +0100 Subject: [PATCH 111/125] removing tailing semicolon --- .../app/plugins/AccessStatusElementItemCompilePlugin.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java index a854b4b42d..6b3c5ded98 100644 --- a/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java +++ b/dspace-oai/src/main/java/org/dspace/xoai/app/plugins/AccessStatusElementItemCompilePlugin.java @@ -29,8 +29,8 @@ import org.dspace.xoai.util.ItemUtils; * * * open.access - * ; - * ; + *
+ * * } * * Returning Values are based on: From b272b1fcab1f5340c35563c5840dadd01d4a7d33 Mon Sep 17 00:00:00 2001 From: "Mark H. Wood" Date: Wed, 31 May 2023 17:37:34 -0400 Subject: [PATCH 112/125] Make service lookup retry log at DEBUG; radically shorten the trace. --- .../servicemanager/DSpaceServiceManager.java | 19 +++++--- .../java/org/dspace/utils/CallStackUtils.java | 44 +++++++++++++++++++ 2 files changed, 56 insertions(+), 7 deletions(-) create mode 100644 dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java diff --git a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java index afd1627f5e..6cffa7ee66 100644 --- a/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java +++ b/dspace-services/src/main/java/org/dspace/servicemanager/DSpaceServiceManager.java @@ -7,6 +7,8 @@ */ package org.dspace.servicemanager; +import static org.apache.logging.log4j.Level.DEBUG; + import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; @@ -21,6 +23,8 @@ import java.util.Map; import javax.annotation.PreDestroy; import org.apache.commons.lang3.ArrayUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.dspace.kernel.Activator; import org.dspace.kernel.config.SpringLoader; import org.dspace.kernel.mixins.ConfigChangeListener; @@ -28,8 +32,7 @@ import org.dspace.kernel.mixins.ServiceChangeListener; import org.dspace.kernel.mixins.ServiceManagerReadyAware; import org.dspace.servicemanager.config.DSpaceConfigurationService; import org.dspace.servicemanager.spring.DSpaceBeanFactoryPostProcessor; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.dspace.utils.CallStackUtils; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.beans.factory.NoSuchBeanDefinitionException; @@ -44,7 +47,7 @@ import org.springframework.context.support.ClassPathXmlApplicationContext; */ public final class DSpaceServiceManager implements ServiceManagerSystem { - private static Logger log = LoggerFactory.getLogger(DSpaceServiceManager.class); + private static Logger log = LogManager.getLogger(); public static final String CONFIG_PATH = "spring/spring-dspace-applicationContext.xml"; public static final String CORE_RESOURCE_PATH = "classpath*:spring/spring-dspace-core-services.xml"; @@ -426,9 +429,10 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(name, type); } catch (BeansException e) { // no luck, try the fall back option - log.warn( + log.debug( "Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", name, e); + + " Will try to look up bean by type next.", name); + CallStackUtils.logCaller(log, DEBUG); service = null; } } else { @@ -437,8 +441,9 @@ public final class DSpaceServiceManager implements ServiceManagerSystem { service = (T) applicationContext.getBean(type.getName(), type); } catch (BeansException e) { // no luck, try the fall back option - log.warn("Unable to locate bean by name or id={}." - + " Will try to look up bean by type next.", type.getName(), e); + log.debug("Unable to locate bean by name or id={}." 
+ + " Will try to look up bean by type next.", type::getName); + CallStackUtils.logCaller(log, DEBUG); service = null; } } diff --git a/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java new file mode 100644 index 0000000000..cb60a223a1 --- /dev/null +++ b/dspace-services/src/main/java/org/dspace/utils/CallStackUtils.java @@ -0,0 +1,44 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.utils; + +import static java.lang.StackWalker.Option.RETAIN_CLASS_REFERENCE; + +import java.lang.StackWalker.StackFrame; + +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.Logger; + +/** + * Utility methods for manipulating call stacks. + * + * @author mwood + */ +public class CallStackUtils { + private CallStackUtils() {} + + /** + * Log the class, method and line of the caller's caller. + * + * @param log logger to use. + * @param level log at this level, if enabled. + */ + static public void logCaller(Logger log, Level level) { + if (log.isEnabled(level)) { + StackWalker stack = StackWalker.getInstance(RETAIN_CLASS_REFERENCE); + StackFrame caller = stack.walk(stream -> stream.skip(2) + .findFirst() + .get()); + String callerClassName = caller.getDeclaringClass().getCanonicalName(); + String callerMethodName = caller.getMethodName(); + int callerLine = caller.getLineNumber(); + log.log(level, "Called from {}.{} line {}.", + callerClassName, callerMethodName, callerLine); + } + } +} From 56c8820d31439ce613585fadf1813ee62d45d0b1 Mon Sep 17 00:00:00 2001 From: Giuseppe Digilio Date: Thu, 1 Jun 2023 14:41:58 +0200 Subject: [PATCH 113/125] [DURACOM-126] Improve test in order to check item can be submitted --- .../java/org/dspace/builder/WorkspaceItemBuilder.java | 4 ++++ .../dspace/app/rest/WorkspaceItemRestRepositoryIT.java | 10 +++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java index 8b7bc2978b..9d786d4761 100644 --- a/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java +++ b/dspace-api/src/test/java/org/dspace/builder/WorkspaceItemBuilder.java @@ -189,6 +189,10 @@ public class WorkspaceItemBuilder extends AbstractBuilder Date: Thu, 1 Jun 2023 14:48:46 +0200 Subject: [PATCH 114/125] [DURACOM-126] add java doc --- .../main/java/org/dspace/app/util/SubmissionStepConfig.java | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java index 1a8f2744b8..28d39d911b 100644 --- a/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java +++ b/dspace-api/src/main/java/org/dspace/app/util/SubmissionStepConfig.java @@ -176,6 +176,12 @@ public class SubmissionStepConfig implements Serializable { return visibilityOutside; } + /** + * Check if given submission section object is hidden for the current submission scope + * + * @param obj the InProgressSubmission to check + * @return true if the submission section is hidden, false otherwise + */ public boolean isHiddenForInProgressSubmission(InProgressSubmission obj) { String scopeToCheck = getScope(obj); From 
0ec27875bcb3bab66a38ddc3987ed401c67a2f0d Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Wed, 31 May 2023 16:47:02 +0200 Subject: [PATCH 115/125] [DURACOM-149] use right formatter for mapping of dc.date.issued in pubmed live import; added integration tests --- .../PubmedDateMetadatumContributor.java | 18 +- .../PubmedImportMetadataSourceServiceIT.java | 213 ++++++++++++++++++ .../app/rest/pubmedimport-fetch-test.xml | 14 ++ .../app/rest/pubmedimport-fetch-test2.xml | 14 ++ .../app/rest/pubmedimport-search-test.xml | 194 ++++++++++++++++ .../app/rest/pubmedimport-search-test2.xml | 132 +++++++++++ 6 files changed, 578 insertions(+), 7 deletions(-) create mode 100644 dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml create mode 100644 dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml diff --git a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java index 6536026058..add9caef1b 100644 --- a/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java +++ b/dspace-api/src/main/java/org/dspace/importer/external/pubmed/metadatamapping/contributor/PubmedDateMetadatumContributor.java @@ -15,8 +15,8 @@ import java.util.Date; import java.util.LinkedList; import java.util.List; +import org.apache.commons.lang3.StringUtils; import org.apache.logging.log4j.Logger; -import org.dspace.content.DCDate; import org.dspace.importer.external.metadatamapping.MetadataFieldConfig; import org.dspace.importer.external.metadatamapping.MetadataFieldMapping; import org.dspace.importer.external.metadatamapping.MetadatumDTO; @@ -107,28 +107,30 @@ public class PubmedDateMetadatumContributor implements MetadataContributor LinkedList dayList = (LinkedList) day.contributeMetadata(t); for (int i = 0; i < yearList.size(); i++) { - DCDate dcDate = null; + String resultDateString = ""; String dateString = ""; + SimpleDateFormat resultFormatter = null; if (monthList.size() > i && dayList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue() + "-" + dayList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM-dd"); } else if (monthList.size() > i) { dateString = yearList.get(i).getValue() + "-" + monthList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy-MM"); } else { dateString = yearList.get(i).getValue(); + resultFormatter = new SimpleDateFormat("yyyy"); } int j = 0; // Use the first dcDate that has been formatted (Config should go from most specific to most lenient) - while (j < dateFormatsToAttempt.size()) { + while (j < dateFormatsToAttempt.size() && StringUtils.isBlank(resultDateString)) { String dateFormat = dateFormatsToAttempt.get(j); try { SimpleDateFormat formatter = new SimpleDateFormat(dateFormat); Date date = formatter.parse(dateString); - dcDate = new DCDate(date); - values.add(metadataFieldMapping.toDCValue(field, formatter.format(date))); - break; + resultDateString = 
resultFormatter.format(date); } catch (ParseException e) { // Multiple dateformats can be configured, we don't want to print the entire stacktrace every // time one of those formats fails. @@ -138,7 +140,9 @@ public class PubmedDateMetadatumContributor implements MetadataContributor } j++; } - if (dcDate == null) { + if (StringUtils.isNotBlank(resultDateString)) { + values.add(metadataFieldMapping.toDCValue(field, resultDateString)); + } else { log.info( "Failed parsing " + dateString + ", check " + "the configured dataformats in config/spring/api/pubmed-integration.xml"); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java new file mode 100644 index 0000000000..79b8ec3f72 --- /dev/null +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -0,0 +1,213 @@ +/** + * The contents of this file are subject to the license and copyright + * detailed in the LICENSE and NOTICE files at the root of the source + * tree and available online at + * + * http://www.dspace.org/license/ + */ +package org.dspace.app.rest; + +import static org.junit.Assert.assertEquals; +import static org.mockito.Mockito.when; + +import java.io.InputStream; +import java.nio.charset.Charset; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +import org.apache.commons.io.IOUtils; +import org.apache.http.client.methods.CloseableHttpResponse; +import org.apache.http.impl.client.CloseableHttpClient; +import org.dspace.importer.external.datamodel.ImportRecord; +import org.dspace.importer.external.liveimportclient.service.LiveImportClientImpl; +import org.dspace.importer.external.metadatamapping.MetadatumDTO; +import org.dspace.importer.external.pubmed.service.PubmedImportMetadataSourceServiceImpl; +import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} + * + * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) + */ +public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { + + @Autowired + private PubmedImportMetadataSourceServiceImpl pubmedImportMetadataServiceImpl; + + @Autowired + private LiveImportClientImpl liveImportClientImpl; + + @Test + public void pubmedImportMetadataGetRecordsTest() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 
0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + @Test + public void pubmedImportMetadataGetRecords2Test() throws Exception { + context.turnOffAuthorisationSystem(); + + CloseableHttpClient originalHttpClient = liveImportClientImpl.getHttpClient(); + CloseableHttpClient httpClient = Mockito.mock(CloseableHttpClient.class); + try (InputStream fetchFile = getClass().getResourceAsStream("pubmedimport-fetch-test2.xml"); + InputStream searchFile = getClass().getResourceAsStream("pubmedimport-search-test2.xml")) { + liveImportClientImpl.setHttpClient(httpClient); + + CloseableHttpResponse fetchResponse = mockResponse( + IOUtils.toString(fetchFile, Charset.defaultCharset()), 200, "OK"); + CloseableHttpResponse searchResponse = mockResponse( + IOUtils.toString(searchFile, Charset.defaultCharset()), 200, "OK"); + + when(httpClient.execute(ArgumentMatchers.any())).thenReturn(fetchResponse).thenReturn(searchResponse); + + context.restoreAuthSystemState(); + ArrayList collection2match = getRecords2(); + Collection recordsImported = pubmedImportMetadataServiceImpl.getRecords("test query", 0, 1); + assertEquals(1, recordsImported.size()); + matchRecords(new ArrayList(recordsImported), collection2match); + } finally { + liveImportClientImpl.setHttpClient(originalHttpClient); + } + } + + private ArrayList getRecords() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + MetadatumDTO title = createMetadatumDTO("dc","title", null, + "Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review."); + MetadatumDTO description1 = createMetadatumDTO("dc", "description", "abstract", "To report and synthesize" + + " the main strategies for teaching clinical reasoning described in the literature in the context of" + + " advanced clinical practice and promote new areas of research to improve the pedagogical approach" + + " to clinical reasoning in Advanced Practice Nursing."); + MetadatumDTO description2 = createMetadatumDTO("dc", "description", "abstract", "Clinical reasoning and" + + " clinical thinking are essential elements in the advanced nursing clinical practice decision-making" + + " process. The quality improvement of care is related to the development of those skills." + + " Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical" + + " reasoning in advanced clinical practice."); + MetadatumDTO description3 = createMetadatumDTO("dc", "description", "abstract", "A scoping review was" + + " conducted using the framework developed by Arksey and O'Malley as a research strategy." + + " Consistent with the nature of scoping reviews, a study protocol has been established."); + MetadatumDTO description4 = createMetadatumDTO("dc", "description", "abstract", "The studies included and" + + " analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary" + + " revision studies, published in biomedical databases, were selected, including qualitative ones." + + " Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID." 
+ + " Three authors independently evaluated the articles for titles, abstracts, and full text."); + MetadatumDTO description5 = createMetadatumDTO("dc", "description", "abstract", "1433 articles were examined," + + " applying the eligibility and exclusion criteria 73 studies were assessed for eligibility," + + " and 27 were included in the scoping review. The results that emerged from the review were" + + " interpreted and grouped into three macro strategies (simulations-based education, art and visual" + + " thinking, and other learning approaches) and nineteen educational interventions."); + MetadatumDTO description6 = createMetadatumDTO("dc", "description", "abstract", "Among the different" + + " strategies, the simulations are the most used. Despite this, our scoping review reveals that is" + + " necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic" + + " reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to" + + " demonstrate which methodology is more effective in obtaining the learning outcomes necessary to" + + " acquire an adequate level of judgment and critical thinking. Therefore, it will be" + + " necessary to relate teaching methodologies with the skills developed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "36708638"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Giuffrida, Silvia"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Silano, Verdiana"); + MetadatumDTO author3 = createMetadatumDTO("dc", "contributor", "author", "Ramacciati, Nicola"); + MetadatumDTO author4 = createMetadatumDTO("dc", "contributor", "author", "Prandi, Cesarina"); + MetadatumDTO author5 = createMetadatumDTO("dc", "contributor", "author", "Baldon, Alessia"); + MetadatumDTO author6 = createMetadatumDTO("dc", "contributor", "author", "Bianchi, Monica"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2023-02"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + MetadatumDTO subject1 = createMetadatumDTO("dc", "subject", null, "Advanced practice nursing"); + MetadatumDTO subject2 = createMetadatumDTO("dc", "subject", null, "Clinical reasoning"); + MetadatumDTO subject3 = createMetadatumDTO("dc", "subject", null, "Critical thinking"); + MetadatumDTO subject4 = createMetadatumDTO("dc", "subject", null, "Educational strategies"); + MetadatumDTO subject5 = createMetadatumDTO("dc", "subject", null, "Nursing education"); + MetadatumDTO subject6 = createMetadatumDTO("dc", "subject", null, "Teaching methodology"); + + metadatums.add(title); + metadatums.add(description1); + metadatums.add(description2); + metadatums.add(description3); + metadatums.add(description4); + metadatums.add(description5); + metadatums.add(description6); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(author3); + metadatums.add(author4); + metadatums.add(author5); + metadatums.add(author6); + metadatums.add(date); + metadatums.add(language); + metadatums.add(subject1); + metadatums.add(subject2); + metadatums.add(subject3); + metadatums.add(subject4); + metadatums.add(subject5); + metadatums.add(subject6); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + + private ArrayList getRecords2() { + ArrayList records = new ArrayList<>(); + List metadatums = new ArrayList(); + //define first record + 
MetadatumDTO title = createMetadatumDTO("dc","title", null, "Searching NCBI Databases Using Entrez."); + MetadatumDTO description = createMetadatumDTO("dc", "description", "abstract", "One of the most widely" + + " used interfaces for the retrieval of information from biological databases is the NCBI Entrez" + + " system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between" + + " the individual entries found in numerous public databases. The existence of such natural" + + " connections, mostly biological in nature, argued for the development of a method through which" + + " all the information about a particular biological entity could be found without having to" + + " sequentially visit and query disparate databases. Two basic protocols describe simple, text-based" + + " searches, illustrating the types of information that can be retrieved through the Entrez system." + + " An alternate protocol builds upon the first basic protocol, using additional," + + " built-in features of the Entrez system, and providing alternative ways to issue the initial query." + + " The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure" + + " visualization tool, is also discussed."); + MetadatumDTO identifierOther = createMetadatumDTO("dc", "identifier", "other", "21975942"); + MetadatumDTO author1 = createMetadatumDTO("dc", "contributor", "author", "Gibney, Gretchen"); + MetadatumDTO author2 = createMetadatumDTO("dc", "contributor", "author", "Baxevanis, Andreas D"); + MetadatumDTO date = createMetadatumDTO("dc", "date", "issued", "2011-10"); + MetadatumDTO language = createMetadatumDTO("dc", "language", "iso", "en"); + + metadatums.add(title); + metadatums.add(description); + metadatums.add(identifierOther); + metadatums.add(author1); + metadatums.add(author2); + metadatums.add(date); + metadatums.add(language); + ImportRecord record = new ImportRecord(metadatums); + + records.add(record); + return records; + } + +} \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml new file mode 100644 index 0000000000..4f921658e3 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b5ab65e3b2b2253cd3a + + 36708638 + + + "10 1016 j nepr 2023 103548"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml new file mode 100644 index 0000000000..1ff9570777 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-fetch-test2.xml @@ -0,0 +1,14 @@ + + + + 1 + 1 + 0 + 1 + MCID_64784b12ccf058150336d6a8 + + 21975942 + + + "10 1002 0471142905 hg0610s71"[All Fields] + \ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml new file mode 100644 index 0000000000..666fb1e7d5 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test.xml @@ -0,0 +1,194 @@ + + + + + + 36708638 + + 2023 + 02 + 23 + + + 2023 + 02 + 23 + +
+ + 1873-5223 + + 67 + + 2023 + Feb + + + Nurse education in practice + Nurse Educ Pract + + Teaching strategies of clinical reasoning in advanced nursing clinical practice: A scoping review. + + 103548 + 103548 + + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + To report and synthesize the main strategies for teaching clinical reasoning described in the literature in the context of advanced clinical practice and promote new areas of research to improve the pedagogical approach to clinical reasoning in Advanced Practice Nursing. + Clinical reasoning and clinical thinking are essential elements in the advanced nursing clinical practice decision-making process. The quality improvement of care is related to the development of those skills. Therefore, it is crucial to optimize teaching strategies that can enhance the role of clinical reasoning in advanced clinical practice. + A scoping review was conducted using the framework developed by Arksey and O'Malley as a research strategy. Consistent with the nature of scoping reviews, a study protocol has been established. + The studies included and analyzed in this scoping review cover from January 2016 to June 2022. Primary studies and secondary revision studies, published in biomedical databases, were selected, including qualitative ones. Electronic databases used were: CINAHL, PubMed, Cochrane Library, Scopus, and OVID. Three authors independently evaluated the articles for titles, abstracts, and full text. + 1433 articles were examined, applying the eligibility and exclusion criteria 73 studies were assessed for eligibility, and 27 were included in the scoping review. The results that emerged from the review were interpreted and grouped into three macro strategies (simulations-based education, art and visual thinking, and other learning approaches) and nineteen educational interventions. + Among the different strategies, the simulations are the most used. Despite this, our scoping review reveals that is necessary to use different teaching strategies to stimulate critical thinking, improve diagnostic reasoning, refine clinical judgment, and strengthen decision-making. However, it is not possible to demonstrate which methodology is more effective in obtaining the learning outcomes necessary to acquire an adequate level of judgment and critical thinking. Therefore, it will be necessary to relate teaching methodologies with the skills developed. + Copyright © 2023 Elsevier Ltd. All rights reserved. + + + + Giuffrida + Silvia + S + + Department of Cardiology and Cardiac Surgery, Cardio Centro Ticino Institute, Ente Ospedaliero Cantonale, Lugano, Switzerland. Electronic address: silvia.giuffrida@eoc.ch. + + + + Silano + Verdiana + V + + Nursing Direction of Settore Anziani Città di Bellinzona, Bellinzona, Switzerland. Electronic address: verdiana.silano@hotmail.it. + + + + Ramacciati + Nicola + N + + Department of Pharmacy, Health and Nutritional Sciences (DFSSN), University of Calabria, Rende, Italy. Electronic address: nicola.ramacciati@unical.it. + + + + Prandi + Cesarina + C + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: cesarina.prandi@supsi.ch. + + + + Baldon + Alessia + A + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: alessia.baldon@supsi.ch. 
+ + + + Bianchi + Monica + M + + Department of Business Economics, Health and Social Care (DEASS), University of Applied Sciences and Arts of Southern Switzerland, Manno, Switzerland. Electronic address: monica.bianchi@supsi.ch. + + + + eng + + Journal Article + Review + + + 2023 + 01 + 17 + +
+ + Scotland + Nurse Educ Pract + 101090848 + 1471-5953 + + IM + + + Humans + + + Advanced Practice Nursing + + + Learning + + + Curriculum + + + Thinking + + + Clinical Reasoning + + + Students, Nursing + + + + Advanced practice nursing + Clinical reasoning + Critical thinking + Educational strategies + Nursing education + Teaching methodology + + Declaration of Competing Interest The authors declare that they have no known competing financial interests or personal relationships that could have appeared to influence the work reported in this paper. +
+ + + + 2022 + 11 + 9 + + + 2022 + 12 + 17 + + + 2023 + 1 + 10 + + + 2023 + 1 + 29 + 6 + 0 + + + 2023 + 2 + 25 + 6 + 0 + + + 2023 + 1 + 28 + 18 + 7 + + + ppublish + + 36708638 + 10.1016/j.nepr.2023.103548 + S1471-5953(23)00010-0 + + +
+
\ No newline at end of file diff --git a/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml new file mode 100644 index 0000000000..949d3b1250 --- /dev/null +++ b/dspace-server-webapp/src/test/resources/org/dspace/app/rest/pubmedimport-search-test2.xml @@ -0,0 +1,132 @@ + + + + + + 21975942 + + 2012 + 01 + 13 + + + 2016 + 10 + 21 + +
+ + 1934-8258 + + Chapter 6 + + 2011 + Oct + + + Current protocols in human genetics + Curr Protoc Hum Genet + + Searching NCBI Databases Using Entrez. + + Unit6.10 + Unit6.10 + + 10.1002/0471142905.hg0610s71 + + One of the most widely used interfaces for the retrieval of information from biological databases is the NCBI Entrez system. Entrez capitalizes on the fact that there are pre-existing, logical relationships between the individual entries found in numerous public databases. The existence of such natural connections, mostly biological in nature, argued for the development of a method through which all the information about a particular biological entity could be found without having to sequentially visit and query disparate databases. Two basic protocols describe simple, text-based searches, illustrating the types of information that can be retrieved through the Entrez system. An alternate protocol builds upon the first basic protocol, using additional, built-in features of the Entrez system, and providing alternative ways to issue the initial query. The support protocol reviews how to save frequently issued queries. Finally, Cn3D, a structure visualization tool, is also discussed. + © 2011 by John Wiley & Sons, Inc. + + + + Gibney + Gretchen + G + + + Baxevanis + Andreas D + AD + + + eng + + Journal Article + +
+ + United States + Curr Protoc Hum Genet + 101287858 + 1934-8258 + + IM + + + Animals + + + Database Management Systems + + + Databases, Factual + + + Humans + + + Information Storage and Retrieval + methods + + + Internet + + + Molecular Conformation + + + National Library of Medicine (U.S.) + + + PubMed + + + United States + + + User-Computer Interface + + +
+ + + + 2011 + 10 + 7 + 6 + 0 + + + 2011 + 10 + 7 + 6 + 0 + + + 2012 + 1 + 14 + 6 + 0 + + + ppublish + + 21975942 + 10.1002/0471142905.hg0610s71 + + +
+
\ No newline at end of file From eb224eb8096fa9d7173cace2a2b8625d40b807eb Mon Sep 17 00:00:00 2001 From: Alexandre Vryghem Date: Sun, 14 May 2023 21:52:31 +0200 Subject: [PATCH 116/125] 100553: Added stricter metadata field & schema validation --- .../MetadataFieldRestRepository.java | 12 ++-- .../MetadataSchemaRestRepository.java | 6 +- .../rest/MetadataSchemaRestRepositoryIT.java | 16 ++++- .../rest/MetadatafieldRestRepositoryIT.java | 60 +++++++++++++++++-- 4 files changed, 81 insertions(+), 13 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java index eefd6331d1..5152f11902 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/MetadataFieldRestRepository.java @@ -260,14 +260,18 @@ public class MetadataFieldRestRepository extends DSpaceRestRepository Date: Fri, 2 Jun 2023 09:45:52 +0700 Subject: [PATCH 117/125] Tweaks fo test cases. --- .../org/dspace/discovery/DiscoveryIT.java | 36 +++++++++---------- .../utils/RestDiscoverQueryBuilderTest.java | 7 ++-- 2 files changed, 18 insertions(+), 25 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 0c3a52ec79..164525afb1 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -768,6 +768,7 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { .withTitle("item " + i) .build(); } + context.restoreAuthSystemState(); // Build query with default parameters (except for workspaceConf) DiscoverQuery discoverQuery = SearchUtils.getQueryBuilder() @@ -776,26 +777,21 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { DiscoverResult result = searchService.search(context, discoverQuery); - if (defaultSortField.getMetadataField().equals("dc_date_accessioned")) { - // Verify that search results are sort by dc_date_accessioned - LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() - .map(o -> ((Item) o.getIndexedObject()).getMetadata()) - .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) - .map(m -> m.getValue()).findFirst().orElse("") - ) - .collect(Collectors.toCollection(LinkedList::new)); - assertFalse(dc_date_accesioneds.isEmpty()); - for (int i = 1; i < dc_date_accesioneds.size() - 1; i++) { - assertTrue(dc_date_accesioneds.get(i).compareTo(dc_date_accesioneds.get(i + 1)) >= 0); - } - } else if (defaultSortField.getMetadataField().equals("lastModified")) { - LinkedList lastModifieds = result.getIndexableObjects().stream() - .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) - .collect(Collectors.toCollection(LinkedList::new)); - assertFalse(lastModifieds.isEmpty()); - for (int i = 1; i < lastModifieds.size() - 1; i++) { - assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); - } + /* + // code example for testing against sort by dc_date_accessioned + LinkedList dc_date_accesioneds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getMetadata()) + .map(l -> l.stream().filter(m -> m.getMetadataField().toString().equals("dc_date_accessioned")) + .map(m -> 
m.getValue()).findFirst().orElse("") + ) + .collect(Collectors.toCollection(LinkedList::new)); + }*/ + LinkedList lastModifieds = result.getIndexableObjects().stream() + .map(o -> ((Item) o.getIndexedObject()).getLastModified().toString()) + .collect(Collectors.toCollection(LinkedList::new)); + assertFalse(lastModifieds.isEmpty()); + for (int i = 1; i < lastModifieds.size() - 1; i++) { + assertTrue(lastModifieds.get(i).compareTo(lastModifieds.get(i + 1)) >= 0); } } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java index 511bb8f98b..e21f395f09 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/utils/RestDiscoverQueryBuilderTest.java @@ -171,15 +171,12 @@ public class RestDiscoverQueryBuilderTest { @Test public void testSortByDefaultSortField() throws Exception { - page = PageRequest.of(2, 10, Sort.Direction.DESC, "dc.date.accessioned"); + page = PageRequest.of(2, 10); restQueryBuilder.buildQuery(context, null, discoveryConfiguration, null, null, emptyList(), page); verify(discoverQueryBuilder, times(1)) .buildQuery(context, null, discoveryConfiguration, null, emptyList(), emptyList(), - page.getPageSize(), page.getOffset(), - discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField().getMetadataField(), - discoveryConfiguration.getSearchSortConfiguration().getDefaultSortField() - .getDefaultSortOrder().name().toUpperCase()); + page.getPageSize(), page.getOffset(), null, null); } @Test(expected = DSpaceBadRequestException.class) From 7c85b007c027c959217b3b9bc5d730006223d448 Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Fri, 2 Jun 2023 10:10:45 +0200 Subject: [PATCH 118/125] [DURACOM-149] remove trailing whitespace --- .../dspace/app/rest/PubmedImportMetadataSourceServiceIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java index 79b8ec3f72..3b39d25121 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/PubmedImportMetadataSourceServiceIT.java @@ -30,7 +30,7 @@ import org.springframework.beans.factory.annotation.Autowired; /** * Integration tests for {@link PubmedImportMetadataSourceServiceImpl} - * + * * @author Francesco Pio Scognamiglio (francescopio.scognamiglio at 4science.com) */ public class PubmedImportMetadataSourceServiceIT extends AbstractLiveImportIntegrationTest { From a38ff421694a5be590a41928985e3f8cd54c1b37 Mon Sep 17 00:00:00 2001 From: Alan Orth Date: Thu, 25 May 2023 14:52:11 +0300 Subject: [PATCH 119/125] dspace: capture publisher from CrossRef live import Publisher is a required field on CrossRef so we can always rely on capturing this information when doing a live import. 
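As a rough illustration of the value being mapped (a sketch against the public CrossRef works
response shape, not the DSpace wiring itself; the DOI below is a placeholder), the publisher sits
directly on the work object returned by the REST API, which is why the new integration test can
always assert a dc.publisher value:

    import com.jayway.jsonpath.JsonPath;

    public class CrossRefPublisherSketch {
        public static void main(String[] args) {
            // Trimmed-down shape of a CrossRef works response; most fields omitted.
            String response = "{\"status\":\"ok\",\"message\":{"
                    + "\"publisher\":\"Petro Mohyla Black Sea National University\","
                    + "\"DOI\":\"10.1234/example\"}}";
            // The field read here is what the import maps onto dc.publisher.
            String publisher = JsonPath.read(response, "$.message.publisher");
            System.out.println(publisher);
        }
    }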
See: https://github.com/CrossRef/rest-api-doc/blob/master/api_format.md --- .../app/rest/CrossRefImportMetadataSourceServiceIT.java | 8 +++++++- dspace/config/spring/api/crossref-integration.xml | 9 +++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java index 11fe58ac1d..72524709ec 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/CrossRefImportMetadataSourceServiceIT.java @@ -153,6 +153,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt MetadatumDTO issn = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums.add(title); metadatums.add(author); @@ -163,6 +165,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums.add(issn); metadatums.add(volume); metadatums.add(issue); + metadatums.add(publisher); ImportRecord firstrRecord = new ImportRecord(metadatums); @@ -179,6 +182,8 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt MetadatumDTO issn2 = createMetadatumDTO("dc", "identifier", "issn", "2415-3060"); MetadatumDTO volume2 = createMetadatumDTO("oaire", "citation", "volume", "1"); MetadatumDTO issue2 = createMetadatumDTO("oaire", "citation", "issue", "2"); + MetadatumDTO publisher2 = createMetadatumDTO("dc", "publisher", null, + "Petro Mohyla Black Sea National University"); metadatums2.add(title2); metadatums2.add(author2); @@ -189,6 +194,7 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt metadatums2.add(issn2); metadatums2.add(volume2); metadatums2.add(issue2); + metadatums2.add(publisher2); ImportRecord secondRecord = new ImportRecord(metadatums2); records.add(firstrRecord); @@ -196,4 +202,4 @@ public class CrossRefImportMetadataSourceServiceIT extends AbstractLiveImportInt return records; } -} \ No newline at end of file +} diff --git a/dspace/config/spring/api/crossref-integration.xml b/dspace/config/spring/api/crossref-integration.xml index 5d67c17626..d1e416d2b0 100644 --- a/dspace/config/spring/api/crossref-integration.xml +++ b/dspace/config/spring/api/crossref-integration.xml @@ -30,6 +30,7 @@ + @@ -137,6 +138,14 @@ + + + + + + + + From a0a1844de7c6aa4fdc5a743b402e1c056c57ace5 Mon Sep 17 00:00:00 2001 From: Tim Donohue Date: Mon, 5 Jun 2023 16:01:31 -0500 Subject: [PATCH 120/125] Fix test class compilation --- .../src/test/java/org/dspace/discovery/DiscoveryIT.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java index 164525afb1..55be531418 100644 --- a/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java +++ b/dspace-api/src/test/java/org/dspace/discovery/DiscoveryIT.java @@ -7,6 +7,7 @@ */ package org.dspace.discovery; +import static org.dspace.discovery.SolrServiceWorkspaceWorkflowRestrictionPlugin.DISCOVER_WORKSPACE_CONFIGURATION_NAME; import static 
org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -746,8 +747,8 @@ public class DiscoveryIT extends AbstractIntegrationTestWithDatabase { */ @Test public void searchWithDefaultSortServiceTest() throws SearchServiceException { - - DiscoveryConfiguration workspaceConf = SearchUtils.getDiscoveryConfiguration("workspace", null); + DiscoveryConfiguration workspaceConf = + SearchUtils.getDiscoveryConfiguration(context, DISCOVER_WORKSPACE_CONFIGURATION_NAME, null); // Skip if no default sort option set for workspaceConf if (workspaceConf.getSearchSortConfiguration().getDefaultSortField() == null) { return; From 14bb32036c2195802a7db7d790e5994244618062 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 16:01:04 +0200 Subject: [PATCH 121/125] DURACOM-136 add explaination of the commandLineParameters in the javadoc --- .../org/dspace/scripts/configuration/ScriptConfiguration.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java index e22063eb49..642409a924 100644 --- a/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java +++ b/dspace-api/src/main/java/org/dspace/scripts/configuration/ScriptConfiguration.java @@ -87,6 +87,8 @@ public abstract class ScriptConfiguration implements B * to the currentUser in the context being an admin, however this can be overwritten by each script individually * if different rules apply * @param context The relevant DSpace context + * @param commandLineParameters the parameters that will be used to start the process if known, + * null otherwise * @return A boolean indicating whether the script is allowed to execute or not */ public boolean isAllowedToExecute(Context context, List commandLineParameters) { From 2b523ba5ac1cbaeaf8bccd9f5b575e7564e14aae Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 16:01:47 +0200 Subject: [PATCH 122/125] DURACOM-136 improve handling and testing of invalid mimetype --- .../app/rest/ScriptProcessesController.java | 18 ++++++++++++++++-- .../app/rest/ScriptRestRepositoryIT.java | 14 +++++++++++++- 2 files changed, 29 insertions(+), 3 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java index 3aeec9535b..70149bbb6b 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/ScriptProcessesController.java @@ -19,9 +19,12 @@ import org.dspace.app.rest.model.hateoas.ProcessResource; import org.dspace.app.rest.repository.ScriptRestRepository; import org.dspace.app.rest.utils.ContextUtil; import org.dspace.core.Context; +import org.dspace.scripts.configuration.ScriptConfiguration; +import org.dspace.scripts.service.ScriptService; import org.dspace.services.RequestService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.rest.webmvc.ControllerUtils; +import org.springframework.data.rest.webmvc.ResourceNotFoundException; import org.springframework.hateoas.RepresentationModel; import org.springframework.http.HttpHeaders; import org.springframework.http.HttpStatus; @@ -50,6 +53,9 @@ public class ScriptProcessesController { @Autowired private 
ScriptRestRepository scriptRestRepository; + @Autowired + private ScriptService scriptService; + @Autowired private RequestService requestService; @@ -80,9 +86,17 @@ public class ScriptProcessesController { @RequestMapping(method = RequestMethod.POST, consumes = "!" + MediaType.MULTIPART_FORM_DATA_VALUE) @PreAuthorize("hasAuthority('AUTHENTICATED')") public ResponseEntity> startProcessInvalidMimeType( - @PathVariable(name = "name") String scriptName, - @RequestParam(name = "file", required = false) List files) + @PathVariable(name = "name") String scriptName) throws Exception { + if (log.isTraceEnabled()) { + log.trace("Starting Process for Script with name: " + scriptName); + } + Context context = ContextUtil.obtainContext(requestService.getCurrentRequest().getHttpServletRequest()); + ScriptConfiguration scriptToExecute = scriptService.getScriptConfiguration(scriptName); + + if (scriptToExecute == null) { + throw new ResourceNotFoundException("The script for name: " + scriptName + " wasn't found"); + } throw new DSpaceBadRequestException("Invalid mimetype"); } diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 16e691ef6f..29a0076d0c 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -134,6 +134,13 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(jsonPath("$.page.totalElements", is(0))); } + @Test + public void findAllScriptsAnonymousUserTest() throws Exception { + getClient().perform(get("/api/system/scripts")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.page.totalElements", is(0))); + } + @Test public void findAllScriptsLocalAdminsTest() throws Exception { context.turnOffAuthorisationSystem(); @@ -543,8 +550,13 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { public void postProcessAdminWithWrongContentTypeBadRequestException() throws Exception { String token = getAuthToken(admin.getEmail(), password); + + getClient(token) + .perform(post("/api/system/scripts/mock-script/processes")) + .andExpect(status().isBadRequest()); + getClient(token).perform(post("/api/system/scripts/mock-script-invalid/processes")) - .andExpect(status().isBadRequest()); + .andExpect(status().isNotFound()); } @Test From 32cd24b7538694ec964289bb13027d09ae06b829 Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 19:45:31 +0200 Subject: [PATCH 123/125] DURACOM-136 restrict script endpoints to authenticated users, add test to proof that standard script are reseved to site administrator --- .../rest/repository/ScriptRestRepository.java | 4 +- .../app/rest/ScriptRestRepositoryIT.java | 42 +++++++++++++++++-- 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java index 2fc996a327..09d65590b6 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ScriptRestRepository.java @@ -59,7 +59,7 @@ public class ScriptRestRepository extends DSpaceRestRepository findAll(Context context, Pageable pageable) { List scriptConfigurations = 
scriptService.getScriptConfigurations(context); diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java index 29a0076d0c..42c9f2c9f7 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ScriptRestRepositoryIT.java @@ -136,9 +136,9 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void findAllScriptsAnonymousUserTest() throws Exception { + // this should be changed once we allow anonymous user to execute some scripts getClient().perform(get("/api/system/scripts")) - .andExpect(status().isOk()) - .andExpect(jsonPath("$.page.totalElements", is(0))); + .andExpect(status().isUnauthorized()); } @Test @@ -358,12 +358,48 @@ public class ScriptRestRepositoryIT extends AbstractControllerIntegrationTest { .andExpect(status().isNotFound()); } + /** + * This test will create a basic structure of communities, collections and items with some local admins at each + * level and verify that the local admins, nor generic users can run scripts reserved to administrator + * (i.e. default one that don't override the default + * {@link ScriptConfiguration#isAllowedToExecute(org.dspace.core.Context, List)} method implementation + */ @Test public void postProcessNonAdminAuthorizeException() throws Exception { - String token = getAuthToken(eperson.getEmail(), password); + context.turnOffAuthorisationSystem(); + EPerson comAdmin = EPersonBuilder.createEPerson(context) + .withEmail("comAdmin@example.com") + .withPassword(password).build(); + EPerson colAdmin = EPersonBuilder.createEPerson(context) + .withEmail("colAdmin@example.com") + .withPassword(password).build(); + EPerson itemAdmin = EPersonBuilder.createEPerson(context) + .withEmail("itemAdmin@example.com") + .withPassword(password).build(); + Community community = CommunityBuilder.createCommunity(context) + .withName("Community") + .withAdminGroup(comAdmin) + .build(); + Collection collection = CollectionBuilder.createCollection(context, community) + .withName("Collection") + .withAdminGroup(colAdmin) + .build(); + Item item = ItemBuilder.createItem(context, collection).withAdminUser(itemAdmin) + .withTitle("Test item to curate").build(); + context.restoreAuthSystemState(); + String token = getAuthToken(eperson.getEmail(), password); + String comAdmin_token = getAuthToken(eperson.getEmail(), password); + String colAdmin_token = getAuthToken(eperson.getEmail(), password); + String itemAdmin_token = getAuthToken(eperson.getEmail(), password); getClient(token).perform(multipart("/api/system/scripts/mock-script/processes")) .andExpect(status().isForbidden()); + getClient(comAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(colAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); + getClient(itemAdmin_token).perform(multipart("/api/system/scripts/mock-script/processes")) + .andExpect(status().isForbidden()); } @Test From 5a8c7a397c342c5f931b209395965bbc365dfa3f Mon Sep 17 00:00:00 2001 From: Andrea Bollini Date: Tue, 6 Jun 2023 20:12:23 +0200 Subject: [PATCH 124/125] DURACOM-136 open endpoints to retrieve files of process to the user that has triggered the process --- .../ProcessFileTypesLinkRepository.java | 2 +- .../ProcessFilesLinkRepository.java | 2 +- 
.../ProcessOutputLinkRepository.java | 2 +- .../app/rest/ProcessRestRepositoryIT.java | 81 ++++++++++++++----- 4 files changed, 66 insertions(+), 21 deletions(-) diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java index 8eb8d7ef65..16c8115b29 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFileTypesLinkRepository.java @@ -47,7 +47,7 @@ public class ProcessFileTypesLinkRepository extends AbstractDSpaceRestRepository * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public ProcessFileTypesRest getFileTypesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java index 42fcef0d62..5d8251cf19 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessFilesLinkRepository.java @@ -47,7 +47,7 @@ public class ProcessFilesLinkRepository extends AbstractDSpaceRestRepository imp * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public Page getFilesFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java index f9f665d14f..f5b3edced2 100644 --- a/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java +++ b/dspace-server-webapp/src/main/java/org/dspace/app/rest/repository/ProcessOutputLinkRepository.java @@ -50,7 +50,7 @@ public class ProcessOutputLinkRepository extends AbstractDSpaceRestRepository im * @throws SQLException If something goes wrong * @throws AuthorizeException If something goes wrong */ - @PreAuthorize("hasAuthority('ADMIN')") + @PreAuthorize("hasPermission(#processId, 'PROCESS', 'READ')") public BitstreamRest getOutputFromProcess(@Nullable HttpServletRequest request, Integer processId, @Nullable Pageable optionalPageable, diff --git a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java index d76e20b23d..670d8e2f35 100644 --- a/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java +++ b/dspace-server-webapp/src/test/java/org/dspace/app/rest/ProcessRestRepositoryIT.java @@ -222,22 +222,35 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void getProcessFiles() throws Exception { + context.setCurrentUser(eperson); Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new 
LinkedList<>()).build(); - try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + "[0].value", is("inputfile"))); - + getClient(token).perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.files[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.files[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.files[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + getClient(epersonToken) + .perform(get("/api/core/bitstreams/" + bitstream.getID() + "/content")) + .andExpect(status().isOk()); } @Test @@ -245,25 +258,34 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } - Bitstream bitstream = processService.getBitstream(context, process, "inputfile"); + Bitstream bitstream = processService.getBitstream(context, newProcess, "inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/files/inputfile")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) + .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) + .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + + "[0].value", is("inputfile"))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/files/inputfile")) .andExpect(status().isOk()) .andExpect(jsonPath("$._embedded.bitstreams[0].name", is("test.csv"))) .andExpect(jsonPath("$._embedded.bitstreams[0].uuid", is(bitstream.getID().toString()))) .andExpect(jsonPath("$._embedded.bitstreams[0].metadata['dspace.process.filetype']" + "[0].value", 
is("inputfile"))); - } @Test public void getProcessFilesTypes() throws Exception { + Process newProcess = ProcessBuilder.createProcess(context, eperson, "mock-script", new LinkedList<>()).build(); try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendFile(context, process, is, "inputfile", "test.csv"); + processService.appendFile(context, newProcess, is, "inputfile", "test.csv"); } List fileTypesToCheck = new LinkedList<>(); @@ -271,12 +293,18 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/filetypes")) + getClient(token).perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) .andExpect(status().isOk()) .andExpect(jsonPath("$", ProcessFileTypesMatcher - .matchProcessFileTypes("filetypes-" + process.getID(), fileTypesToCheck))); - + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); + // also the user that triggered the process should be able to access the process' files + String epersonToken = getAuthToken(eperson.getEmail(), password); + getClient(epersonToken) + .perform(get("/api/system/processes/" + newProcess.getID() + "/filetypes")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$", ProcessFileTypesMatcher + .matchProcessFileTypes("filetypes-" + newProcess.getID(), fileTypesToCheck))); } @Test @@ -811,25 +839,42 @@ public class ProcessRestRepositoryIT extends AbstractControllerIntegrationTest { @Test public void getProcessOutput() throws Exception { + context.setCurrentUser(eperson); + Process process1 = ProcessBuilder.createProcess(context, eperson, "mock-script", parameters) + .withStartAndEndTime("10/01/1990", "20/01/1990") + .build(); + try (InputStream is = IOUtils.toInputStream("Test File For Process", CharEncoding.UTF_8)) { - processService.appendLog(process.getID(), process.getName(), "testlog", ProcessLogLevel.INFO); + processService.appendLog(process1.getID(), process1.getName(), "testlog", ProcessLogLevel.INFO); } - processService.createLogBitstream(context, process); + processService.createLogBitstream(context, process1); List fileTypesToCheck = new LinkedList<>(); fileTypesToCheck.add("inputfile"); String token = getAuthToken(admin.getEmail(), password); - getClient(token).perform(get("/api/system/processes/" + process.getID() + "/output")) + getClient(token).perform(get("/api/system/processes/" + process1.getID() + "/output")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.type", is("bitstream"))) .andExpect(jsonPath("$.metadata['dc.title'][0].value", - is(process.getName() + process.getID() + ".log"))) + is(process1.getName() + process1.getID() + ".log"))) .andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", is("script_output"))); + String epersonToken = getAuthToken(eperson.getEmail(), password); + + getClient(epersonToken) + .perform(get("/api/system/processes/" + process1.getID() + "/output")) + .andExpect(status().isOk()) + .andExpect(jsonPath("$.name", + is(process1.getName() + process1.getID() + ".log"))) + .andExpect(jsonPath("$.type", is("bitstream"))) + .andExpect(jsonPath("$.metadata['dc.title'][0].value", + is(process1.getName() + process1.getID() + ".log"))) + 
.andExpect(jsonPath("$.metadata['dspace.process.filetype'][0].value", + is("script_output"))); } } From 7757c4e898373fc06b03e070eca67bd58d2ad4cf Mon Sep 17 00:00:00 2001 From: Francesco Pio Scognamiglio Date: Thu, 8 Jun 2023 13:50:11 +0200 Subject: [PATCH 125/125] [DURACOM-153] fix validation to use the retrieved zip file on saf import --- .../src/main/java/org/dspace/app/itemimport/ItemImport.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java index c94e163243..b32de11f7a 100644 --- a/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java +++ b/dspace-api/src/main/java/org/dspace/app/itemimport/ItemImport.java @@ -332,17 +332,19 @@ public class ItemImport extends DSpaceRunnable { */ protected void readZip(Context context, ItemImportService itemImportService) throws Exception { Optional optionalFileStream = Optional.empty(); + Optional validationFileStream = Optional.empty(); if (!remoteUrl) { // manage zip via upload optionalFileStream = handler.getFileStream(context, zipfilename); + validationFileStream = handler.getFileStream(context, zipfilename); } else { // manage zip via remote url optionalFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); + validationFileStream = Optional.ofNullable(new URL(zipfilename).openStream()); } - if (optionalFileStream.isPresent()) { + if (validationFileStream.isPresent()) { // validate zip file - Optional validationFileStream = handler.getFileStream(context, zipfilename); if (validationFileStream.isPresent()) { validateZip(validationFileStream.get()); }