Clean up a lot of ErrorProne and Checkstyle warnings. #2983

So I can see what else is wrong.
Author: Mark H. Wood
Date:   2020-07-23 13:00:36 -04:00
parent b14f52313e
commit 0076e6b37b
16 changed files with 65 additions and 59 deletions
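
Most of the hunks below apply the same three mechanical fixes: chains of identical catch blocks that each called e.printStackTrace() are collapsed into one multi-catch that logs the exception, @Override is added where a method implements or overrides an inherited one, and fields that are assigned exactly once are made final. A minimal sketch of the cleaned-up shape, using hypothetical Widget* names rather than anything from the DSpace code:

import java.io.IOException;
import java.sql.SQLException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Hypothetical classes for illustration only; none of this is DSpace API.
abstract class WidgetBuilder {
    protected abstract String getName();
}

class FileWidgetBuilder extends WidgetBuilder {
    private static final Logger LOG = LogManager.getLogger(FileWidgetBuilder.class);

    // Checkstyle/ErrorProne-style fix: a field assigned exactly once becomes final.
    private final String name = "file-widget";

    @Override   // fix: annotate methods that implement an inherited contract
    protected String getName() {
        return name;
    }

    public void build() {
        try {
            riskyOperation();
        } catch (IOException | SQLException e) {
            // fix: one multi-catch that logs, instead of several identical
            // catch blocks that each called e.printStackTrace()
            LOG.warn("Failed to build the widget", e);
        }
    }

    private void riskyOperation() throws IOException, SQLException {
        // stand-in for the real work a builder would do
    }
}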

View File

@@ -22,16 +22,16 @@ import org.dspace.content.factory.ContentServiceFactory;
 import org.dspace.content.service.CollectionService;
 import org.dspace.content.service.CommunityService;
 import org.dspace.content.service.ItemService;
-import org.dspace.services.ConfigurationService;
-import org.dspace.services.factory.DSpaceServicesFactory;
 import org.junit.Test;

 public class MetadataImportTest extends AbstractIntegrationTest {

-    private ItemService itemService = ContentServiceFactory.getInstance().getItemService();
-    private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
-    private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
-    private ConfigurationService configurationService = DSpaceServicesFactory.getInstance().getConfigurationService();
+    private final ItemService itemService
+            = ContentServiceFactory.getInstance().getItemService();
+    private final CollectionService collectionService
+            = ContentServiceFactory.getInstance().getCollectionService();
+    private final CommunityService communityService
+            = ContentServiceFactory.getInstance().getCommunityService();

     @Test
     public void metadataImportTest() throws Exception {

View File

@@ -13,6 +13,8 @@ import org.dspace.service.DSpaceCRUDService;
 /**
  * @author Jonas Van Goolen - (jonas@atmire.com)
+ *
+ * @param <T> A specific kind of ReloadableEntity.
  */
 public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends AbstractBuilder<T, DSpaceCRUDService> {
@@ -20,8 +22,10 @@ public abstract class AbstractCRUDBuilder<T extends ReloadableEntity> extends Ab
         super(context);
     }

+    @Override
     protected abstract DSpaceCRUDService getService();

+    @Override
     public abstract T build();

     public void delete(T dso) throws Exception {

View File

@@ -43,12 +43,15 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
         this.context = context;
     }

+    @Override
     public abstract void cleanup() throws Exception;

+    @Override
     protected abstract DSpaceObjectService<T> getService();

+    @Override
     protected <B> B handleException(final Exception e) {
         log.error(e.getMessage(), e);
         return null;
@@ -231,8 +234,10 @@ public abstract class AbstractDSpaceObjectBuilder<T extends DSpaceObject>
         return (B) this;
     }

+    @Override
     public abstract T build() throws SQLException, AuthorizeException;

+    @Override
     public void delete(Context c, T dso) throws Exception {
         if (dso != null) {
             getService().delete(c, dso);

View File

@@ -129,6 +129,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
         return this;
     }

+    @Override
     public Bitstream build() {
         try {
             bitstreamService.update(context, bitstream);
@@ -163,6 +164,7 @@ public class BitstreamBuilder extends AbstractDSpaceObjectBuilder<Bitstream> {
         }
     }

+    @Override
     protected DSpaceObjectService<Bitstream> getService() {
         return bitstreamService;
     }

View File

@@ -25,7 +25,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
     private Bundle bundle;
     private Item item;
     private String name;
-    private List<Bitstream> bitstreams = new ArrayList<>();
+    private final List<Bitstream> bitstreams = new ArrayList<>();

     protected BundleBuilder(Context context) {
         super(context);
@@ -52,6 +52,7 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
         return this;
     }

+    @Override
     public void cleanup() throws Exception {
         try (Context c = new Context()) {
             c.turnOffAuthorisationSystem();
@@ -64,10 +65,12 @@ public class BundleBuilder extends AbstractDSpaceObjectBuilder<Bundle> {
         }
     }

+    @Override
     protected DSpaceObjectService<Bundle> getService() {
         return bundleService;
     }

+    @Override
     public Bundle build() throws SQLException, AuthorizeException {
         bundle = bundleService.create(context, item, name);

View File

@@ -10,6 +10,7 @@ package org.dspace.builder;
 import java.io.IOException;
 import java.sql.SQLException;
 import java.util.UUID;

+import org.apache.log4j.Logger;
 import org.dspace.authorize.AuthorizeException;
 import org.dspace.content.service.DSpaceObjectService;
@@ -19,6 +20,7 @@ import org.dspace.eperson.EPerson;
 import org.dspace.eperson.Group;

 public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
+    private static final Logger LOG = Logger.getLogger(EPersonBuilder.class);
     private EPerson ePerson;
@@ -39,20 +41,18 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
         }
     }

+    @Override
     protected DSpaceObjectService<EPerson> getService() {
         return ePersonService;
     }

+    @Override
     public EPerson build() {
         try {
             ePersonService.update(context, ePerson);
             indexingService.commit();
-        } catch (SearchServiceException e) {
-            e.printStackTrace();
-        } catch (SQLException e) {
-            e.printStackTrace();
-        } catch (AuthorizeException e) {
-            e.printStackTrace();
+        } catch (SearchServiceException | SQLException | AuthorizeException e) {
+            LOG.warn("Failed to complete the EPerson", e);
         }
         return ePerson;
     }
@@ -65,10 +65,8 @@ public class EPersonBuilder extends AbstractDSpaceObjectBuilder<EPerson> {
     private EPersonBuilder create() {
         try {
             ePerson = ePersonService.create(context);
-        } catch (SQLException e) {
-            e.printStackTrace();
-        } catch (AuthorizeException e) {
-            e.printStackTrace();
+        } catch (SQLException | AuthorizeException e) {
+            LOG.warn("Failed to create the EPerson", e);
         }
         return this;
     }

View File

@@ -53,6 +53,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
         }
     }

+    @Override
     public EntityType build() {
         try {
@@ -91,7 +92,7 @@ public class EntityTypeBuilder extends AbstractBuilder<EntityType, EntityTypeSer
             this.entityType = entityTypeService.create(context, entityType);
         } catch (SQLException | AuthorizeException e) {
-            e.printStackTrace();
+            log.warn("Failed to create the EntityType", e);
         }
         return this;

View File

@@ -64,17 +64,9 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
             context.dispatchEvents();
             indexingService.commit();
-        } catch (SearchServiceException e) {
-            log.error(e);
-        } catch (SQLException e) {
-            log.error(e);
-        } catch (AuthorizeException e) {
-            log.error(e);
-            ;
-        } catch (NonUniqueMetadataException e) {
-            e.printStackTrace();
-        } catch (IOException e) {
-            e.printStackTrace();
+        } catch (SearchServiceException | SQLException | AuthorizeException
+                | NonUniqueMetadataException | IOException e) {
+            log.error("Failed to complete MetadataField", e);
         }
         return metadataField;
     }
@@ -141,7 +133,7 @@ public class MetadataFieldBuilder extends AbstractBuilder<MetadataField, Metadat
             metadataField = metadataFieldService
                     .create(context, schema, element, qualifier, scopeNote);
         } catch (NonUniqueMetadataException e) {
-            e.printStackTrace();
+            log.error("Failed to create MetadataField", e);
         }
         return this;

View File

@@ -63,15 +63,10 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
             context.dispatchEvents();
             indexingService.commit();
-        } catch (SearchServiceException e) {
-            log.error(e);
-        } catch (SQLException e) {
-            log.error(e);
-        } catch (AuthorizeException e) {
-            log.error(e);
-            ;
+        } catch (SearchServiceException | SQLException | AuthorizeException e) {
+            log.error(e);
         } catch (NonUniqueMetadataException e) {
-            e.printStackTrace();
+            log.error("Failed to complete MetadataSchema", e);
         }
         return metadataSchema;
     }
@@ -123,7 +118,7 @@ public class MetadataSchemaBuilder extends AbstractBuilder<MetadataSchema, Metad
         try {
             metadataSchema = metadataSchemaService.create(context, name, namespace);
         } catch (NonUniqueMetadataException e) {
-            e.printStackTrace();
+            log.error("Failed to create MetadataSchema", e);
         }
         return this;

View File

@@ -57,6 +57,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
         }
     }

+    @Override
     public Process build() {
         try {
             processService.update(context, process);
@@ -68,6 +69,7 @@ public class ProcessBuilder extends AbstractBuilder<Process, ProcessService> {
         return process;
     }

+    @Override
     protected ProcessService getService() {
         return processService;
     }

View File

@@ -56,6 +56,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
         }
     }

+    @Override
     public Relationship build() {
         try {
@@ -117,7 +118,7 @@ public class RelationshipBuilder extends AbstractBuilder<Relationship, Relations
         try {
             relationship = relationshipService.create(context, leftItem, rightItem, relationshipType, 0, 0);
         } catch (SQLException | AuthorizeException e) {
-            e.printStackTrace();
+            log.warn("Failed to create relationship", e);
         }
         return this;

View File

@@ -62,6 +62,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
         }
     }

+    @Override
     public RelationshipType build() {
         try {
@@ -116,7 +117,7 @@ public class RelationshipTypeBuilder extends AbstractBuilder<RelationshipType, R
                 leftCardinalityMax, rightCardinalityMin, rightCardinalityMax);
         } catch (SQLException | AuthorizeException e) {
-            e.printStackTrace();
+            log.error("Failed to create RelationshipType", e);
         }
         return this;

View File

@@ -29,6 +29,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
      * @param language - the language
      * @return a map of mocked licenses with the id and the license
      */
+    @Override
     public Map<String, CCLicense> retrieveLicenses(String language) {
         Map<String, CCLicense> ccLicenses = new HashMap<>();
         CCLicense mockLicense1 = createMockLicense(1, new int[]{3, 2, 3});
@@ -89,6 +90,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
      * @param answerMap - the answers to the different field questions
      * @return the CC License URI
      */
+    @Override
     public String retrieveRightsByQuestion(final String licenseId,
                                            final String language,
                                            final Map<String, String> answerMap) {
@@ -105,6 +107,7 @@ public class MockCCLicenseConnectorServiceImpl extends CCLicenseConnectorService
      * @return a mock license RDF document or null when the URI contains invalid
      * @throws IOException
      */
+    @Override
     public Document retrieveLicenseRDFDoc(String licenseURI) throws IOException {
         if (!StringUtils.contains(licenseURI, "invalid")) {
             InputStream cclicense = null;

View File

@@ -110,7 +110,7 @@ public class MockSolrServer {
                 server.deleteByQuery("*:*");
                 server.commit();
             } catch (SolrServerException | IOException e) {
-                e.printStackTrace(System.err);
+                log.error("Failed to empty Solr index: {}", e.getMessage(), e);
             }
             loadedCores.put(coreName, server);
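
The Solr change just above switches to a parameterized log call. Assuming the log field in that class is a Log4j 2 (or SLF4J) logger, a trailing Throwable with no matching {} placeholder is attached to the log event as the exception, so the stack trace is still recorded. A small hypothetical sketch of the same pattern, not taken from the DSpace code:

import java.io.IOException;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Illustration only; not DSpace code.
class SolrCleanupExample {
    private static final Logger log = LogManager.getLogger(SolrCleanupExample.class);

    void emptyIndex() {
        try {
            throw new IOException("simulated Solr failure");
        } catch (IOException e) {
            // One {} placeholder, two arguments: the message fills the
            // placeholder and the exception is logged with its stack trace.
            log.error("Failed to empty Solr index: {}", e.getMessage(), e);
        }
    }
}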

View File

@@ -27,8 +27,6 @@ import com.maxmind.geoip2.record.MaxMind;
 import com.maxmind.geoip2.record.Postal;
 import com.maxmind.geoip2.record.RepresentedCountry;
 import com.maxmind.geoip2.record.Traits;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 import org.dspace.solr.MockSolrServer;
 import org.springframework.beans.factory.DisposableBean;
 import org.springframework.beans.factory.InitializingBean;
@@ -42,8 +40,6 @@ public class MockSolrLoggerServiceImpl
         extends SolrLoggerServiceImpl
         implements InitializingBean, DisposableBean {

-    private static final Logger log = LogManager.getLogger();
-
     private MockSolrServer mockSolrServer;

     public MockSolrLoggerServiceImpl() {

View File

@@ -11,6 +11,7 @@ import static junit.framework.TestCase.assertEquals;
 import static org.junit.Assert.fail;

 import java.sql.SQLException;

+import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.Logger;
 import org.dspace.AbstractUnitTest;
@@ -35,9 +36,11 @@ import org.junit.Test;
  */
 public class XmlWorkflowFactoryTest extends AbstractUnitTest {

-    private CollectionService collectionService = ContentServiceFactory.getInstance().getCollectionService();
-    private CommunityService communityService = ContentServiceFactory.getInstance().getCommunityService();
-    private XmlWorkflowFactory xmlWorkflowFactory
+    private final CollectionService collectionService
+            = ContentServiceFactory.getInstance().getCollectionService();
+    private final CommunityService communityService
+            = ContentServiceFactory.getInstance().getCommunityService();
+    private final XmlWorkflowFactory xmlWorkflowFactory
             = new DSpace().getServiceManager().getServiceByName("xmlWorkflowFactory",
                                                                 XmlWorkflowFactoryImpl.class);
     private Community owningCommunity;
@@ -47,7 +50,7 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
     /**
      * log4j category
      */
-    private static final Logger log = org.apache.logging.log4j.LogManager.getLogger(XmlWorkflowFactoryTest.class);
+    private static final Logger log = LogManager.getLogger(XmlWorkflowFactoryTest.class);

     /**
      * This method will be run before every test as per @Before. It will
@@ -112,12 +115,12 @@ public class XmlWorkflowFactoryTest extends AbstractUnitTest {
     @Test
     public void workflowMapping_NonMappedCollection() throws WorkflowConfigurationException {
         Workflow workflow = xmlWorkflowFactory.getWorkflow(this.nonMappedCollection);
-        assertEquals(workflow.getID(), "defaultWorkflow");
+        assertEquals("defaultWorkflow", workflow.getID());
     }

     @Test
     public void workflowMapping_MappedCollection() throws WorkflowConfigurationException {
         Workflow workflow = xmlWorkflowFactory.getWorkflow(this.mappedCollection);
-        assertEquals(workflow.getID(), "selectSingleReviewer");
+        assertEquals("selectSingleReviewer", workflow.getID());
     }
 }
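
The assertEquals swaps in the last file follow JUnit's assertEquals(expected, actual) parameter order, so a failing test labels the expected and actual values correctly in its message. A tiny hypothetical test showing the convention, not part of the DSpace test suite:

import static org.junit.Assert.assertEquals;

import org.junit.Test;

// Illustration only.
public class ArgumentOrderTest {

    @Test
    public void expectedValueComesFirst() {
        String actual = "defaultWorkflow";
        // assertEquals(expected, actual): swapping the arguments still passes
        // or fails the same way, but the failure message would label the
        // values backwards.
        assertEquals("defaultWorkflow", actual);
    }
}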