Fix typos discovered by codespell

Christian Clauss
2024-08-05 22:05:13 +02:00
parent 8e82640cb3
commit dbf33f2113
263 changed files with 443 additions and 442 deletions

View File

@@ -18,7 +18,7 @@ import org.dspace.core.Context;
 * Configuration properties: (with examples)
 * {@code
 * # values for the forever embargo date threshold
-* # This threshold date is used in the default access status helper to dermine if an item is
+* # This threshold date is used in the default access status helper to determine if an item is
 * # restricted or embargoed based on the start date of the primary (or first) file policies.
 * # In this case, if the policy start date is inferior to the threshold date, the status will
 * # be embargo, else it will be restricted.

View File

@@ -475,7 +475,7 @@ public class DSpaceCSV implements Serializable {
 key = key + "." + metadataField.getQualifier();
 }
-// Add the language if there is one (schema.element.qualifier[langauge])
+// Add the language if there is one (schema.element.qualifier[language])
 //if ((value.language != null) && (!"".equals(value.language)))
 if (value.getLanguage() != null) {
 key = key + "[" + value.getLanguage() + "]";

View File

@@ -253,7 +253,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 displayChanges(changes, true);
 }
-// Finsh off and tidy up
+// Finish off and tidy up
 c.restoreAuthSystemState();
 c.complete();
 } catch (Exception e) {
@@ -1653,7 +1653,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 .getLabel();
 } else {
 // Target item may be archived; check there.
-// Add to errors if Realtionship.type cannot be derived
+// Add to errors if Relationship.type cannot be derived
 Item targetItem = null;
 if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
 targetItem = itemService.find(c, UUID.fromString(targetUUID));
@@ -1698,7 +1698,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
 validateTypesByTypeByTypeName(c, targetType, originType, typeName, originRow);
 } else {
 // Origin item may be archived; check there.
-// Add to errors if Realtionship.type cannot be derived.
+// Add to errors if Relationship.type cannot be derived.
 Item originItem = null;
 if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
 DSpaceCSVLine dSpaceCSVLine = this.csv.getCSVLines()

View File

@@ -725,7 +725,7 @@ public class ItemExportServiceImpl implements ItemExportService {
 try {
 emailErrorMessage(eperson, e1.getMessage());
 } catch (Exception e) {
-// wont throw here
+// won't throw here
 }
 throw new IllegalStateException(e1);
 } finally {

View File

@@ -69,7 +69,7 @@ public interface ItemExportService {
 boolean excludeBitstreams) throws Exception;
 /**
-* Convenience methot to create export a single Community, Collection, or
+* Convenience method to create export a single Community, Collection, or
 * Item
 *
 * @param dso - the dspace object to export
@@ -93,7 +93,7 @@ public interface ItemExportService {
 Context context, boolean migrate) throws Exception;
 /**
-* Convenience methot to create export a single Community, Collection, or
+* Convenience method to create export a single Community, Collection, or
 * Item
 *
 * @param dso - the dspace object to export
@@ -156,7 +156,7 @@ public interface ItemExportService {
 public String getExportWorkDirectory() throws Exception;
 /**
-* Used to read the export archived. Inteded for download.
+* Used to read the export archived. Intended for download.
 *
 * @param fileName the name of the file to download
 * @param eperson the eperson requesting the download
@@ -233,7 +233,7 @@ public interface ItemExportService {
 /**
 * Since the archive is created in a new thread we are unable to communicate
-* with calling method about success or failure. We accomplis this
+* with calling method about success or failure. We accomplish this
 * communication with email instead. Send a success email once the export
 * archive is complete and ready for download
 *
@@ -248,7 +248,7 @@ public interface ItemExportService {
 /**
 * Since the archive is created in a new thread we are unable to communicate
-* with calling method about success or failure. We accomplis this
+* with calling method about success or failure. We accomplish this
 * communication with email instead. Send an error email if the export
 * archive fails
 *

View File

@@ -2210,7 +2210,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
 emailErrorMessage(eperson, exceptionString);
 throw new Exception(e.getMessage());
 } catch (Exception e2) {
-// wont throw here
+// won't throw here
 }
 } finally {
 // Make sure the database connection gets closed in all conditions.

View File

@@ -121,7 +121,7 @@ public interface ItemImportService {
 /**
 * If a batch import is done in a new thread we are unable to communicate
-* with calling method about success or failure. We accomplis this
+* with calling method about success or failure. We accomplish this
 * communication with email instead. Send an error email if the batch
 * import fails
 *

View File

@@ -217,7 +217,7 @@ public class ItemArchive {
 throws SQLException, Exception {
 DtoMetadata dtom = getMetadataField("dc.identifier.uri");
 if (dtom == null) {
-throw new Exception("No dc.identier.uri field found for handle");
+throw new Exception("No dc.identifier.uri field found for handle");
 }
 this.addUndoMetadataField(dtom); //seed the undo list with the uri

View File

@@ -10,7 +10,7 @@ package org.dspace.app.itemupdate;
 import java.util.Properties;
 /**
-* Bitstream filter targetting the THUMBNAIL bundle
+* Bitstream filter targeting the THUMBNAIL bundle
 */
 public class ThumbnailBitstreamFilter extends BitstreamFilterByBundleName {

View File

@@ -27,7 +27,7 @@ import org.dspace.services.ConfigurationService;
 import org.springframework.beans.factory.annotation.Autowired;
 /**
-* Action to send email to receipients provided in actionSendFilter. The email
+* Action to send email to recipients provided in actionSendFilter. The email
 * body will be result of templating actionSendFilter.
 */
 public class LDNEmailAction implements LDNAction {

View File

@@ -25,7 +25,7 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
* "Offer", "coar-notify:IngestAction" * "Offer", "coar-notify:IngestAction"
* "Offer", "coar-notify:ReviewAction" * "Offer", "coar-notify:ReviewAction"
* *
* and their acknownledgements - if any * and their acknowledgements - if any
* *
* @author Francesco Bacchelli (francesco.bacchelli at 4science dot it) * @author Francesco Bacchelli (francesco.bacchelli at 4science dot it)
*/ */

View File

@@ -8,7 +8,7 @@
 package org.dspace.app.ldn.model;
 /**
-* Informations about the Offer and Acknowledgements targeting a specified Item
+* Information about the Offer and Acknowledgements targeting a specified Item
 *
 * @author Francesco Bacchelli (francesco.bacchelli at 4science.com)
 */

View File

@@ -152,7 +152,7 @@ public class LDNMetadataProcessor implements LDNProcessor {
 }
 /**
-* Lookup associated item to the notification context. If UUID in URL, lookup bu
+* Lookup associated item to the notification context. If UUID in URL, lookup by
 * UUID, else lookup by handle.
 *
 * @param context current context

View File

@@ -148,7 +148,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
 // the thumbnail because the CropBox is generally used to define the
 // area displayed when a user opens the PDF on a screen, whereas the
 // MediaBox is used for print. Not all PDFs set these correctly, so
-// we can use ImageMagick's default behavior unless we see an explit
+// we can use ImageMagick's default behavior unless we see an explicit
 // CropBox. Note: we don't need to do anything special to detect if
 // the CropBox is missing or empty because pdfbox will set it to the
 // same size as the MediaBox if it doesn't exist. Also note that we

View File

@@ -78,7 +78,7 @@ public class SHERPAService {
@SuppressWarnings("unused") @SuppressWarnings("unused")
@PostConstruct @PostConstruct
private void init() { private void init() {
// Get endoint and API key from configuration // Get endpoint and API key from configuration
endpoint = configurationService.getProperty("sherpa.romeo.url", endpoint = configurationService.getProperty("sherpa.romeo.url",
"https://v2.sherpa.ac.uk/cgi/retrieve"); "https://v2.sherpa.ac.uk/cgi/retrieve");
apiKey = configurationService.getProperty("sherpa.romeo.apikey"); apiKey = configurationService.getProperty("sherpa.romeo.apikey");
@@ -156,7 +156,7 @@ public class SHERPAService {
 // If the response body is valid, pass to SHERPAResponse for parsing as JSON
 if (null != responseBody) {
-log.debug("Non-null SHERPA resonse received for query of " + value);
+log.debug("Non-null SHERPA response received for query of " + value);
 InputStream content = null;
 try {
 content = responseBody.getContent();
@@ -259,7 +259,7 @@ public class SHERPAService {
 // If the response body is valid, pass to SHERPAResponse for parsing as JSON
 if (null != responseBody) {
-log.debug("Non-null SHERPA resonse received for query of " + value);
+log.debug("Non-null SHERPA response received for query of " + value);
 InputStream content = null;
 try {
 content = responseBody.getContent();

View File

@@ -13,7 +13,7 @@ import org.ehcache.event.CacheEvent;
 import org.ehcache.event.CacheEventListener;
 /**
-* This is a EHCache listner responsible for logging sherpa cache events. It is
+* This is a EHCache listener responsible for logging sherpa cache events. It is
 * bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a
 * dedicated Logger for each cache as the CacheEvent doesn't include details
 * about where the event occur

View File

@@ -47,7 +47,7 @@ public class SHERPASubmitService {
 }
 /**
-* Setter for SHERPA service, reponsible for actual HTTP API calls
+* Setter for SHERPA service, responsible for actual HTTP API calls
 * @see "dspace-dspace-addon-sherpa-configuration-services.xml"
 * @param sherpaService
 */

View File

@@ -141,7 +141,7 @@ public class GenerateSitemaps {
 public static void deleteSitemaps() throws IOException {
 File outputDir = new File(configurationService.getProperty("sitemap.dir"));
 if (!outputDir.exists() && !outputDir.isDirectory()) {
-log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directort");
+log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
 } else {
 FileUtils.deleteDirectory(outputDir);
 }

View File

@@ -451,7 +451,7 @@ public class HTMLReport implements Report {
 }
 /**
-* Clean Stirngs for display in HTML
+* Clean Strings for display in HTML
 *
 * @param s The String to clean
 * @return The cleaned String

View File

@@ -481,7 +481,7 @@ public class LogAnalyser {
 // of the log file are sequential, but can we assume the files are
 // provided in a data sequence?
 for (i = 0; i < logFiles.length; i++) {
-// check to see if this file is a log file agains the global regex
+// check to see if this file is a log file against the global regex
 Matcher matchRegex = logRegex.matcher(logFiles[i].getName());
 if (matchRegex.matches()) {
 // if it is a log file, open it up and lets have a look at the

View File

@@ -352,7 +352,7 @@ public class ReportGenerator {
 report.setEndDate(endDate);
 report.setMainTitle(name, serverName);
-// define our standard variables for re-use
+// define our standard variables for reuse
 // FIXME: we probably don't need these once we've finished re-factoring
 Iterator<String> keys = null;
 int i = 0;
@@ -518,7 +518,7 @@ public class ReportGenerator {
 /**
 * a standard stats block preparation method for use when an aggregator
-* has to be put out in its entirity. This method will not be able to
+* has to be put out in its entirety. This method will not be able to
 * deal with complex cases, although it will perform sorting by value and
 * translations as per the map file if requested
 *
@@ -783,7 +783,7 @@ public class ReportGenerator {
 return null;
 }
-// build the referece
+// build the reference
 // FIXME: here we have blurred the line between content and presentation
 // and it should probably be un-blurred
 List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(),

View File

@@ -291,7 +291,7 @@ public class StatisticsLoader {
 * by the formatter provided, then we return null.
 *
 * @param thisFile file
-* @param thisPattern patter
+* @param thisPattern pattern
 * @param sdf date format
 * @return StatsFile
 */

View File

@@ -22,7 +22,7 @@ public interface SuggestionService {
 /** find a {@link SuggestionTarget } by source name and suggestion id */
 public SuggestionTarget find(Context context, String source, UUID id);
-/** count all suggetion targets by suggestion source */
+/** count all suggestion targets by suggestion source */
 public long countAll(Context context, String source);
 /** find all suggestion targets by source (paged) */

View File

@@ -624,7 +624,7 @@ public class AuthorizeUtil {
 throws SQLException {
 if (DSpaceServicesFactory.getInstance().getConfigurationService()
 .getBooleanProperty("user.registration", true)) {
-// This allowSetPassword is currently the only mthod that would return true only when it's
+// This allowSetPassword is currently the only method that would return true only when it's
 // actually expected to be returning true.
 // For example the LDAP canSelfRegister will return true due to auto-register, while that
 // does not imply a new user can register explicitly

View File

@@ -62,7 +62,7 @@ public class IndexVersion {
 // First argument is the Index path. Determine its version
 String indexVersion = getIndexVersion(argv[0]);
-// Second argumet is an optional version number to compare to
+// Second argument is an optional version number to compare to
 String compareToVersion = argv.length > 1 ? argv[1] : null;
 // If indexVersion comes back as null, then it is not a valid index directory.

View File

@@ -74,7 +74,7 @@ public class InitializeEntities {
 private static void checkHelpEntered(Options options, CommandLine line) {
 if (line.hasOption("h")) {
 HelpFormatter formatter = new HelpFormatter();
-formatter.printHelp("Intialize Entities", options);
+formatter.printHelp("Initialize Entities", options);
 System.exit(0);
 }
 }

View File

@@ -36,7 +36,7 @@ public class RegexPatternUtils {
 * Computes a pattern starting from a regex definition with flags that
 * uses the standard format: <code>/{regex}/{flags}</code> (ECMAScript format).
 * This method can transform an ECMAScript regex into a java {@code Pattern} object
-* wich can be used to validate strings.
+* which can be used to validate strings.
 * <br/>
 * If regex is null, empty or blank a null {@code Pattern} will be retrieved
 * If it's a valid regex, then a non-null {@code Pattern} will be retrieved,

View File

@@ -158,7 +158,7 @@ public class SubmissionConfigReader {
 * <li>Hashmap of Collection to Submission definition mappings -
 * defines which Submission process a particular collection uses
 * <li>Hashmap of all Submission definitions. List of all valid
-* Submision Processes by name.
+* Submission Processes by name.
 * </ul>
 */
 private void buildInputs(String fileName) throws SubmissionConfigReaderException {
@@ -358,7 +358,7 @@ public class SubmissionConfigReader {
 throws SubmissionConfigReaderException {
 // We should already have the step definitions loaded
 if (stepDefns != null) {
-// retreive step info
+// retrieve step info
 Map<String, String> stepInfo = stepDefns.get(stepID);
 if (stepInfo != null) {

View File

@@ -523,7 +523,7 @@ public class SyndicationFeed {
 */
 protected String resolveURL(HttpServletRequest request, DSpaceObject dso) {
 // If no object given then just link to the whole repository,
-// since no offical handle exists so we have to use local resolution.
+// since no official handle exists so we have to use local resolution.
 if (dso == null) {
 if (baseURL == null) {
 if (request == null) {

View File

@@ -179,7 +179,7 @@ public class Util {
 * @return the file size as a String
 */
 public static String formatFileSize(double in) {
-// Work out the size of the file, and format appropriatly
+// Work out the size of the file, and format appropriately
 // FIXME: When full i18n support is available, use the user's Locale
 // rather than the default Locale.
 NumberFormat nf = NumberFormat.getNumberInstance(Locale.getDefault());
@@ -238,7 +238,7 @@ public class Util {
 } catch (Exception e) {
 // at least log this error to make debugging easier
 // do not silently return null only.
-log.warn("Unable to recoginze UUID from String \""
+log.warn("Unable to recognize UUID from String \""
 + val + "\". Will return null.", e);
 // Problem with parameter
 return null;

View File

@@ -85,7 +85,7 @@ public interface OpenSearchService {
 * @param start - start result index
 * @param pageSize - page size
 * @param scope - search scope, null or the community/collection
-* @param results the retreived DSpace objects satisfying search
+* @param results the retrieved DSpace objects satisfying search
 * @param labels labels to apply - format specific
 * @return formatted search results
 * @throws IOException if IO error
@@ -105,7 +105,7 @@ public interface OpenSearchService {
 * @param start - start result index
 * @param pageSize - page size
 * @param scope - search scope, null or the community/collection
-* @param results the retreived DSpace objects satisfying search
+* @param results the retrieved DSpace objects satisfying search
 * @param labels labels to apply - format specific
 * @return formatted search results
 * @throws IOException if IO error

View File

@@ -235,7 +235,7 @@ public class OidcAuthenticationBean implements AuthenticationMethod {
 try {
 return oidcClient.getAccessToken(code);
 } catch (Exception ex) {
-LOGGER.error("An error occurs retriving the OIDC access_token", ex);
+LOGGER.error("An error occurs retrieving the OIDC access_token", ex);
 return null;
 }
 }
@@ -244,7 +244,7 @@ public class OidcAuthenticationBean implements AuthenticationMethod {
 try {
 return oidcClient.getUserInfo(accessToken);
 } catch (Exception ex) {
-LOGGER.error("An error occurs retriving the OIDC user info", ex);
+LOGGER.error("An error occurs retrieving the OIDC user info", ex);
 return Map.of();
 }
 }

View File

@@ -282,7 +282,7 @@ public class OrcidAuthenticationBean implements AuthenticationMethod {
 try {
 return orcidClient.getPerson(token.getAccessToken(), token.getOrcid());
 } catch (Exception ex) {
-LOGGER.error("An error occurs retriving the ORCID record with id {}",
+LOGGER.error("An error occurs retrieving the ORCID record with id {}",
 token.getOrcid(), ex);
 return null;
 }
@@ -320,7 +320,7 @@ public class OrcidAuthenticationBean implements AuthenticationMethod {
 try {
 return orcidClient.getAccessToken(code);
 } catch (Exception ex) {
-LOGGER.error("An error occurs retriving the ORCID access_token", ex);
+LOGGER.error("An error occurs retrieving the ORCID access_token", ex);
 return null;
 }
 }

View File

@@ -82,7 +82,7 @@ public class PasswordAuthentication
 // No conditions set, so must be able to self register
 return true;
 } else {
-// Itterate through all domains
+// Iterate through all domains
 String check;
 email = email.trim().toLowerCase();
 for (int i = 0; i < domains.length; i++) {

View File

@@ -287,7 +287,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 @Override
 public List<Group> getSpecialGroups(Context context, HttpServletRequest request) {
 try {
-// User has not successfuly authenticated via shibboleth.
+// User has not successfully authenticated via shibboleth.
 if (request == null ||
 context.getCurrentUser() == null) {
 return Collections.EMPTY_LIST;
@@ -309,7 +309,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 if (ignoreScope && ignoreValue) {
 throw new IllegalStateException(
 "Both config parameters for ignoring an roll attributes scope and value are turned on, this is " +
-"not a permissable configuration. (Note: ignore-scope defaults to true) The configuration " +
+"not a permissible configuration. (Note: ignore-scope defaults to true) The configuration " +
 "parameters are: 'authentication.shib.role-header.ignore-scope' and 'authentication.shib" +
 ".role-header.ignore-value'");
 }
@@ -391,7 +391,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 return new ArrayList<>(groups);
 } catch (Throwable t) {
-log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t);
+log.error("Unable to validate any special groups this user may belong too because of an exception.", t);
 return Collections.EMPTY_LIST;
 }
 }
@@ -546,7 +546,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 /**
 * Identify an existing EPerson based upon the shibboleth attributes provided on
-* the request object. There are three cases where this can occurr, each as
+* the request object. There are three cases where this can occur, each as
 * a fallback for the previous method.
 *
 * 1) NetID from Shibboleth Header (best)
@@ -671,7 +671,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 if (!foundNetID && !foundEmail && !foundRemoteUser) {
 log.error(
 "Shibboleth authentication was not able to find a NetId, Email, or Tomcat Remote user for which to " +
-"indentify a user from.");
+"identify a user from.");
 }
@@ -931,7 +931,7 @@ public class ShibAuthentication implements AuthenticationMethod {
"compatibility mode."); "compatibility mode.");
return SUCCESS; return SUCCESS;
} else { } else {
// Passsword failure // Password failure
log.error( log.error(
"Shibboleth-based password authentication failed for user " + username + " because a bad password was" + "Shibboleth-based password authentication failed for user " + username + " because a bad password was" +
" supplied."); " supplied.");
@@ -944,7 +944,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 /**
 * Initialize Shibboleth Authentication.
 *
-* During initalization the mapping of additional eperson metadata will be loaded from the DSpace.cfg
+* During initialization the mapping of additional eperson metadata will be loaded from the DSpace.cfg
 * and cached. While loading the metadata mapping this method will check the EPerson object to see
 * if it supports the metadata field. If the field is not supported and autocreate is turned on then
 * the field will be automatically created.
@@ -985,7 +985,7 @@ public class ShibAuthentication implements AuthenticationMethod {
 String[] metadataParts = metadataString.split("=>");
 if (metadataParts.length != 2) {
-log.error("Unable to parse metadat mapping string: '" + metadataString + "'");
+log.error("Unable to parse metadata mapping string: '" + metadataString + "'");
 continue;
 }

View File

@@ -137,7 +137,7 @@ public class PolicySet {
 * otherwise add to existing policies
 * @param clearOnly if non-null, only process bitstreams whose names contain filter
 * @param name policy name
-* @param description policy descrption
+* @param description policy description
 * @param startDate policy start date
 * @param endDate policy end date
 * @throws SQLException if database error

View File

@@ -17,7 +17,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 /**
 * Implementation of {@link PasswordValidatorService} that verifies if the given
-* passowrd matches the configured pattern.
+* password matches the configured pattern.
 *
 * @author Luca Giamminonni (luca.giamminonni at 4science.it)
 */

View File

@@ -133,7 +133,7 @@ public interface AuthorizeService {
 public boolean authorizeActionBoolean(Context c, DSpaceObject o, int a, boolean useInheritance) throws SQLException;
 /**
-* same authorize with a specif eperson (not the current user), returns boolean for those who don't want to deal
+* same authorize with a specific eperson (not the current user), returns boolean for those who don't want to deal
 * with
 * catching exceptions.
 *
@@ -235,7 +235,7 @@ public interface AuthorizeService {
 * @param o DSpaceObject to add policy to
 * @param actionID ID of action from <code>org.dspace.core.Constants</code>
 * @param e eperson who can perform the action
-* @param type policy type, deafult types are declared in the ResourcePolicy class
+* @param type policy type, default types are declared in the ResourcePolicy class
 * @throws SQLException if database error
 * @throws AuthorizeException if current user in context is not authorized to add policies
 */
@@ -261,7 +261,7 @@ public interface AuthorizeService {
 * @param o object to add policy for
 * @param actionID ID of action from <code>org.dspace.core.Constants</code>
 * @param g group to add policy for
-* @param type policy type, deafult types are declared in the ResourcePolicy class
+* @param type policy type, default types are declared in the ResourcePolicy class
 * @throws SQLException if there's a database problem
 * @throws AuthorizeException if the current user is not authorized to add this policy
 */

View File

@@ -195,7 +195,7 @@ public class BrowseIndex {
 }
 }
-// for backward compatability we ignore the keywords
+// for backward compatibility we ignore the keywords
 // single and full here
 if (!sortName.equalsIgnoreCase("single")
 && !sortName.equalsIgnoreCase("full")
@@ -597,7 +597,7 @@ public class BrowseIndex {
 /**
 * Is the browse index of display type single?
 *
-* @return true if singe, false if not
+* @return true if single, false if not
 */
 public boolean isMetadataIndex() {
 return displayType != null && displayType.startsWith("metadata");

View File

@@ -100,7 +100,7 @@ public class CrossLinks {
 // Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.*
 for (String key : links.keySet()) {
 if (null != key && key.endsWith(".*")) {
-// A substring of length-1, also substracting the wildcard should work as a "startsWith"
+// A substring of length-1, also subtracting the wildcard should work as a "startsWith"
 // check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other
 if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) {
 return links.get(key);

View File

@@ -60,7 +60,7 @@ public class LimitedCountDispatcher implements BitstreamDispatcher {
 }
 /**
-* Retreives the next bitstream to be checked.
+* Retrieves the next bitstream to be checked.
 *
 * @return the bitstream
 * @throws SQLException if database error

View File

@@ -131,7 +131,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
 /**
 * Get a copy of the bitstream list of this bundle
-* Note that this is a copy and if you wish to manipulate the bistream list, you should use
+* Note that this is a copy and if you wish to manipulate the bitstream list, you should use
 * {@ref Bundle.addBitstream}, {@ref Bundle.removeBitstream} or {@ref Bundle.clearBitstreams}
 *
 * @return the bitstreams

View File

@@ -155,7 +155,7 @@ public class Collection extends CacheableDSpaceObject implements DSpaceObjectLeg
 /**
 * Set the default group of submitters
 *
-* Package protected in order to preven unauthorized calls to this method
+* Package protected in order to prevent unauthorized calls to this method
 *
 * @param submitters the group of submitters
 */

View File

@@ -44,7 +44,7 @@ public class DCPersonName {
 * @param rawValue the value entry from the database
 */
 public DCPersonName(String rawValue) {
-// Null by default (representing noone)
+// Null by default (representing no one)
 lastName = null;
 firstNames = null;

View File

@@ -323,7 +323,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
 }
 }
 metadataValue.setValue(String.valueOf(dcvalue));
-//An update here isn't needed, this is persited upon the merge of the owning object
+//An update here isn't needed, this is persisted upon the merge of the owning object
 // metadataValueService.update(context, metadataValue);
 dso.addDetails(metadataField.toString());
 }

View File

@@ -1005,7 +1005,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
 throws SQLException, AuthorizeException {
 // Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files
 // can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other
-// policies or embargos applied
+// policies or embargoes applied
 List<ResourcePolicy> defaultCollectionBundlePolicies = authorizeService
 .getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ);
 // Bitstreams should inherit from DEFAULT_BITSTREAM_READ

View File

@@ -52,7 +52,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
 metadataValue.setMetadataField(metadataField);
 metadataValue.setDSpaceObject(dso);
 dso.addMetadata(metadataValue);
-//An update here isn't needed, this is persited upon the merge of the owning object
+//An update here isn't needed, this is persisted upon the merge of the owning object
 // metadataValueDAO.save(context, metadataValue);
 metadataValue = metadataValueDAO.create(context, metadataValue);
 log.info(LogHelper.getHeader(context, "add_metadatavalue",

View File

@@ -98,7 +98,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
 private Integer rightMinCardinality;
 /**
-* Tha maximum amount of relations for the rightItem that can be present at all times
+* The maximum amount of relations for the rightItem that can be present at all times
 */
 @Column(name = "right_max_cardinality")
 private Integer rightMaxCardinality;

View File

@@ -77,7 +77,7 @@ public class Choice {
 /**
 * Constructor for common need of Hierarchical authorities that want to
-* explicitely set the selectable flag
+* explicitly set the selectable flag
 *
 * @param authority the authority key
 * @param value the text value to store in the metadata

View File

@@ -131,7 +131,7 @@ public interface ChoiceAuthority extends NameAwarePlugin {
 * Build the preferred choice associated with the authKey. The default
 * implementation delegate the creato to the {@link #getLabel(String, String)}
 * {@link #getValue(String, String)} and {@link #getExtra(String, String)}
-* methods but can be directly overriden for better efficiency or special
+* methods but can be directly overridden for better efficiency or special
 * scenario
 *
 * @param authKey authority key known to this authority.

View File

@@ -42,7 +42,7 @@ import org.springframework.beans.factory.annotation.Autowired;
 * Broker for ChoiceAuthority plugins, and for other information configured
 * about the choice aspect of authority control for a metadata field.
 *
-* Configuration keys, per metadata field (e.g. "dc.contributer.author")
+* Configuration keys, per metadata field (e.g. "dc.contributor.author")
 *
 * {@code
 * # names the ChoiceAuthority plugin called for this field

View File

@@ -30,7 +30,7 @@ import org.dspace.core.SelfNamedPlugin;
 * configurable submission.
 *
 * Configuration:
-* This MUST be configured aas a self-named plugin, e.g.:
+* This MUST be configured as a self-named plugin, e.g.:
 * {@code
 * plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
 * org.dspace.content.authority.DCInputAuthority

View File

@@ -76,7 +76,7 @@ public class SolrAuthority implements ChoiceAuthority {
 Integer.parseInt(locale);
 locale = null;
 } catch (NumberFormatException e) {
-//Everything is allright
+//Everything is alright
 }
 if (locale != null && !"".equals(locale)) {
 localSearchField = searchField + "_" + locale;

View File

@@ -22,7 +22,7 @@ import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
 * Broker for ChoiceAuthority plugins, and for other information configured
 * about the choice aspect of authority control for a metadata field.
 *
-* Configuration keys, per metadata field (e.g. "dc.contributer.author")
+* Configuration keys, per metadata field (e.g. "dc.contributor.author")
 * {@code
 * # names the ChoiceAuthority plugin called for this field
 * choices.plugin.<FIELD> = name-of-plugin

View File

@@ -14,7 +14,7 @@ import org.dspace.content.MetadataField;
 /**
 * Broker for metadata authority settings configured for each metadata field.
 *
-* Configuration keys, per metadata field (e.g. "dc.contributer.author")
+* Configuration keys, per metadata field (e.g. "dc.contributor.author")
 *
 * # is field authority controlled (i.e. store authority, confidence values)?
 * {@code authority.controlled.<FIELD> = true}

View File

@@ -36,7 +36,7 @@ import org.jdom2.Namespace;
 */
 public class DIMDisseminationCrosswalk
 implements DisseminationCrosswalk {
-// Non-existant XSD schema
+// Non-existent XSD schema
 public static final String DIM_XSD = "null";
 // Namespaces

View File

@@ -37,7 +37,7 @@ public interface IngestionCrosswalk {
 * internal representations. This version accepts metadata as a
 * <code>List</code> of JDOM XML elements. It interprets the
 * contents of each element and adds the appropriate values to the
-* DSpace Object's internal metadata represenation.
+* DSpace Object's internal metadata representation.
 * <p>
 * Note that this method may be called several times for the same target
 * Item, if the metadata comes as several lists of elements, so it should

View File

@@ -202,7 +202,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
 * e.g. dc.contributor.author
 *
 * 2. XML fragment is prototype of metadata element, with empty or "%s"
-* placeholders for value(s). NOTE: Leave the %s's in becaue
+* placeholders for value(s). NOTE: Leave the %s's in because
 * it's much easier then to see if something is broken.
 *
 * 3. XPath expression listing point(s) in the above XML where

View File

@@ -173,7 +173,7 @@ public class OREIngestionCrosswalk
 try {
 // Make sure the url string escapes all the oddball characters
 String processedURL = encodeForURL(href);
-// Generate a requeset for the aggregated resource
+// Generate a request for the aggregated resource
 ARurl = new URL(processedURL);
 in = ARurl.openStream();
 } catch (FileNotFoundException fe) {

View File

@@ -113,7 +113,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
 private static final Namespace DCTERMS_NS =
 Namespace.getNamespace("dcterms", "http://purl.org/dc/terms/");
-// sentinal: done init?
+// sentinel: done init?
 private boolean inited = false;
 // my plugin name

View File

@@ -47,12 +47,12 @@ public class SubscriptionDsoMetadataForEmailCompose implements StreamDisseminati
 Item item = (Item) dso;
 PrintStream printStream = new PrintStream(out);
 for (String actualMetadata : metadata) {
-String[] splitted = actualMetadata.split("\\.");
+String[] split = actualMetadata.split("\\.");
 String qualifier = null;
-if (splitted.length == 3) {
-qualifier = splitted[2];
+if (split.length == 3) {
+qualifier = split[2];
 }
-var metadataValue = itemService.getMetadataFirstValue(item, splitted[0], splitted[1], qualifier, ANY);
+var metadataValue = itemService.getMetadataFirstValue(item, split[0], split[1], qualifier, ANY);
 printStream.print(metadataValue + " ");
 }
 String itemURL = HandleServiceFactory.getInstance()

View File

@@ -52,7 +52,7 @@ public class MetadataValueDTO {
 * @param schema The schema to be assigned to this MetadataValueDTO object
 * @param element The element to be assigned to this MetadataValueDTO object
 * @param qualifier The qualifier to be assigned to this MetadataValueDTO object
-* @param language The language to be assigend to this MetadataValueDTO object
+* @param language The language to be assigned to this MetadataValueDTO object
 * @param value The value to be assigned to this MetadataValueDTO object
 * @param authority The authority to be assigned to this MetadataValueDTO object
 * @param confidence The confidence to be assigned to this MetadataValueDTO object
@@ -73,7 +73,7 @@ public class MetadataValueDTO {
 * @param schema The schema to be assigned to this MetadataValueDTO object
 * @param element The element to be assigned to this MetadataValueDTO object
 * @param qualifier The qualifier to be assigned to this MetadataValueDTO object
-* @param language The language to be assigend to this MetadataValueDTO object
+* @param language The language to be assigned to this MetadataValueDTO object
 * @param value The value to be assigned to this MetadataValueDTO object
 */
 public MetadataValueDTO(String schema, String element, String qualifier, String language, String value) {

View File

@@ -74,7 +74,7 @@ public class FilterUtils {
 Map<Class<? extends Identifier>, Filter> filters = new HashMap<>();
 // Put DOI 'can we create DOI on install / workspace?' filter
 Filter filter = FilterUtils.getFilterFromConfiguration("identifiers.submission.filter." + configurationSuffix);
-// A null filter should be handled safely by the identifier provier (default, or "always true")
+// A null filter should be handled safely by the identifier provider (default, or "always true")
 filters.put(DOI.class, filter);
 // This won't have an affect until handle providers implement filtering, but is an example of
 // how the filters can be used for other types

View File

@@ -139,7 +139,7 @@ public class DSpaceAIPIngester
 }
 }
-// MODS is acceptable otehrwise..
+// MODS is acceptable otherwise..
 if (found == -1) {
 for (int i = 0; i < dmds.length; ++i) {
 //NOTE: METS standard actually says this should be MODS (all uppercase). But,

View File

@@ -360,7 +360,7 @@ public class PDFPackager
 * CreationDate -> date.created
 * ModDate -> date.created
 * Creator -> description.provenance (application that created orig)
-* Producer -> description.provenance (convertor to pdf)
+* Producer -> description.provenance (converter to pdf)
 * Subject -> description.abstract
 * Keywords -> subject.other
 * date is java.util.Calendar

View File

@@ -38,7 +38,7 @@ import org.dspace.core.Context;
 * format output by <code>disseminate</code> may be affected by
 * parameters, it is given to the <code>getMIMEType</code> method as well.
 * The parameters list is a generalized mechanism to pass parameters
-* from the package requestor to the packager, since different packagers will
+* from the package requester to the packager, since different packagers will
 * understand different sets of parameters.
 *
 * @author Larry Stone

View File

@@ -34,7 +34,7 @@ import org.dspace.workflow.WorkflowException;
 * The ingest methods are also given an attribute-value
 * list of "parameters" which may modify their actions.
 * The parameters list is a generalized mechanism to pass parameters
-* from the requestor to the packager, since different packagers will
+* from the requester to the packager, since different packagers will
 * understand different sets of parameters.
 *
 * @author Larry Stone

View File

@@ -181,7 +181,7 @@ public interface CommunityService extends DSpaceObjectService<Community>, DSpace
/**
- * Add an exisiting collection to the community
+ * Add an existing collection to the community
*
* @param context context
* @param community community


@@ -20,7 +20,7 @@ import org.dspace.core.Context;
public interface FeedbackService {
/**
- * This method sends the feeback email to the recipient passed as parameter
+ * This method sends the feedback email to the recipient passed as parameter
* @param context current DSpace application context
* @param request current servlet request
* @param recipientEmail recipient to which mail is sent


@@ -282,7 +282,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) throws SQLException;
/**
- * This method returns a list of Relationship objets for which the relationshipType property is equal to the given
+ * This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context
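A short usage sketch of the signature shown above; the service and type lookups are assumed to happen elsewhere:

    // Assumes relationshipService and relationshipType were obtained from their factories / DAOs.
    List<Relationship> rels = relationshipService.findByRelationshipType(context, relationshipType);
    for (Relationship rel : rels) {
        // inspect each relationship of this type (tilted relationships are never excluded here)
    }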


@@ -62,16 +62,16 @@ public class Concatenate implements VirtualMetadataConfiguration {
}
/**
- * Generic getter for the seperator
- * @return the seperator to be used by this bean
+ * Generic getter for the separator
+ * @return the separator to be used by this bean
*/
public String getSeparator() {
return separator;
}
/**
- * Generic setter for the seperator property
- * @param separator The String seperator value to which this seperator value will be set to
+ * Generic setter for the separator property
+ * @param separator The String separator value to which this separator value will be set to
*/
public void setSeparator(String separator) {
this.separator = separator;


@@ -346,7 +346,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
/**
* This method will return the count of items for this query as an integer
- * This query needs to already be in a formate that'll return one record that contains the amount
+ * This query needs to already be in a format that'll return one record that contains the amount
*
* @param query
* The query for which the amount of results will be returned.


@@ -713,7 +713,7 @@ public class Context implements AutoCloseable {
public void switchContextUser(EPerson newUser) {
if (currentUserPreviousState != null) {
throw new IllegalStateException(
- "A previous user is already set, you can only switch back and foreward one time");
+ "A previous user is already set, you can only switch back and forward one time");
}
currentUserPreviousState = currentUser;
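A minimal sketch of the intended call pattern; only switchContextUser is visible in this hunk, so the restoring counterpart (written here as restoreContextUser()) is an assumption:

    context.switchContextUser(otherEPerson);   // act as otherEPerson
    try {
        // ... do the impersonated work ...
    } finally {
        // assumed counterpart call; switching again before restoring throws IllegalStateException
        context.restoreContextUser();
    }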


@@ -34,6 +34,6 @@ public final class HibernateProxyHelper {
}
private HibernateProxyHelper() {
- //cant instantiate
+ //can't instantiate
}
}


@@ -50,7 +50,7 @@ public class LogHelper {
StringBuilder result = new StringBuilder();
- // Escape everthing but the extra context info because for some crazy reason two fields
+ // Escape everything but the extra context info because for some crazy reason two fields
// are generated inside this entry one for the session id, and another for the ip
// address. Everything else should be escaped.
result.append(escapeLogField(email)).append(":").append(contextExtraInfo).append(":")


@@ -95,7 +95,7 @@ public final class Utils {
private static final SimpleDateFormat outFmtSecond
= new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ");
- // output format with millsecond precision
+ // output format with millisecond precision
private static final SimpleDateFormat outFmtMillisec
= new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ");
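For reference, the two patterns only differ in the .SSS millisecond fragment; a self-contained illustration:

    import java.text.SimpleDateFormat;
    import java.util.Date;

    public class DateFormatDemo {
        public static void main(String[] args) {
            SimpleDateFormat second = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ");
            SimpleDateFormat millis = new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ");
            Date now = new Date();
            System.out.println(second.format(now)); // e.g. 2024-08-05T22:05:13+0200
            System.out.println(millis.format(now)); // e.g. 2024-08-05T22:05:13.123+0200
        }
    }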


@@ -32,7 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation class for {@link CorrectionType}
- * that will withdrawn target item if it archived and wasn't withdrawn alredy.
+ * that will withdrawn target item if it archived and wasn't withdrawn already.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/


@@ -145,7 +145,7 @@ public class BasicLinkChecker extends AbstractCurationTask {
}
/**
- * Internal utitity method to get a description of the handle
+ * Internal utility method to get a description of the handle
*
* @param item The item to get a description of
* @return The handle, or in workflow


@@ -114,7 +114,7 @@ public class CitationPage extends AbstractCurationTask {
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle);
} catch (AuthorizeException e) {
- log.error("User not authroized to create bundle on item \"{}\": {}",
+ log.error("User not authorized to create bundle on item \"{}\": {}",
item::getName, e::getMessage);
return;
}
@@ -144,7 +144,7 @@ public class CitationPage extends AbstractCurationTask {
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle);
} catch (AuthorizeException e) {
- log.error("User not authroized to create bundle on item \""
+ log.error("User not authorized to create bundle on item \""
+ item.getName() + "\": " + e.getMessage());
}
bundles = itemService.getBundles(item, "ORIGINAL");
@@ -173,7 +173,7 @@ public class CitationPage extends AbstractCurationTask {
InputStream citedInputStream =
new ByteArrayInputStream(
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
- //Add the cited document to the approiate bundle
+ //Add the cited document to the appropriate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle,
dBundle, item, bitstream);
// now set the policies of the preservation and display bundle


@@ -16,7 +16,7 @@ import org.dspace.core.Context;
* ScriptedTask describes a rather generic ability to perform an operation
* upon a DSpace object. It's semantics are identical to the CurationTask interface,
* but is designed to be implemented in scripting languages, rather than
- * Java. For this reason, the 'perform' methods are renamed to accomodate
+ * Java. For this reason, the 'perform' methods are renamed to accommodate
* languages (like Ruby) that lack method overloading.
*
* @author richardrodgers


@@ -94,7 +94,7 @@ public enum IndexClientOptions {
options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists");
options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f.");
options.addOption("f", "force", false,
- "if updating existing index, force each handle to be reindexed even if uptodate");
+ "if updating existing index, force each handle to be reindexed even if up-to-date");
options.addOption("h", "help", false, "print this help message");
return options;
}


@@ -79,7 +79,7 @@ public interface IndexingService {
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
- * @param uniqueIndexId The unqiue index ID of the object to update the index for
+ * @param uniqueIndexId The unique index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/
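A sketch of the fieldModifier map described above, following Solr's atomic-update syntax; the surrounding method name is not visible in this hunk, so the call in the comment is only a placeholder:

    // "set" replaces the stored value; "add" and "remove" are other common Solr atomic-update modifiers.
    Map<String, Object> fieldModifier = new HashMap<>();
    fieldModifier.put("set", "new value");
    // placeholder call shape, method name assumed:
    // indexingService.atomicUpdate(context, uniqueIndexId, "some_field", fieldModifier);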


@@ -88,7 +88,7 @@ public class SearchUtils {
/**
* Retrieves the Discovery Configuration with a null prefix for a DSpace object.
* @param context
- * the dabase context
+ * the database context
* @param dso
* the DSpace object
* @return the Discovery Configuration for the specified DSpace object


@@ -1230,7 +1230,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} catch (IOException | SQLException | SolrServerException e) {
// Any acception that we get ignore it.
// We do NOT want any crashed to shown by the user
- log.error(LogHelper.getHeader(context, "Error while quering solr", "Query: " + query), e);
+ log.error(LogHelper.getHeader(context, "Error while querying solr", "Query: " + query), e);
return new ArrayList<>(0);
}
}
@@ -1359,7 +1359,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
* Gets the solr field that contains the facet value split on each word break to the end, so can be searched
* on each word in the value, see {@link org.dspace.discovery.indexobject.ItemIndexFactoryImpl
* #saveFacetPrefixParts(SolrInputDocument, DiscoverySearchFilter, String, String)}
- * Ony applicable to facets of type {@link DiscoveryConfigurationParameters.TYPE_TEXT}, otherwise uses the regular
+ * Only applicable to facets of type {@link DiscoveryConfigurationParameters.TYPE_TEXT}, otherwise uses the regular
* facet filter field
*/
protected String transformPrefixFacetField(DiscoverFacetField facetFieldConfig, String field,


@@ -79,7 +79,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
// Faceting for metadata browsing. It is different than search facet
// because if there are authority with variants support we want all the
// variants to go in the facet... they are sorted by count so just the
- // prefered label is relevant
+ // preferred label is relevant
for (BrowseIndex bi : bis) {
log.debug("Indexing for item " + item.getID() + ", for index: "
+ bi.getTableName());
@@ -280,7 +280,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
}
}
- // Add sorting options as configurated for the browse system
+ // Add sorting options as configured for the browse system
try {
for (SortOption so : SortOption.getSortOptions()) {
List<MetadataValue> dcvalue = itemService.getMetadataByMetadataString(item, so.getMetadata());


@@ -57,7 +57,7 @@ public class DiscoverySearchFilter {
* For the DiscoverySearchFilter only the TYPE_TEXT, TYPE_DATE and TYPE_HIERARCHICAL are allowed
*
* @param type The type for this DiscoverySearchFilter
- * @throws DiscoveryConfigurationException If none of the types match, this error will be thrown indiciating this
+ * @throws DiscoveryConfigurationException If none of the types match, this error will be thrown indicating this
*/
public void setType(String type) throws DiscoveryConfigurationException {
if (type.equalsIgnoreCase(DiscoveryConfigurationParameters.TYPE_TEXT)) {


@@ -174,7 +174,7 @@ public class EPerson extends CacheableDSpaceObject implements DSpaceObjectLegacy
/**
* Set the EPerson's language. Value is expected to be a Unix/POSIX
* Locale specification of the form {language} or {language}_{territory},
- * e.g. "en", "en_US", "pt_BR" (the latter is Brazilian Portugese).
+ * e.g. "en", "en_US", "pt_BR" (the latter is Brazilian Portuguese).
*
* @param context The relevant DSpace Context.
* @param language language code
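A one-line illustration of the expected values; the setter shape is inferred from the javadoc and @param tags above, so treat it as an assumption:

    eperson.setLanguage(context, "pt_BR");   // POSIX-style code: {language}, optionally _{territory}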


@@ -47,7 +47,7 @@ public class EPersonConsumer implements Consumer {
= DSpaceServicesFactory.getInstance().getConfigurationService();
/**
- * Initalise the consumer
+ * Initialise the consumer
*
* @throws Exception if error
*/


@@ -343,11 +343,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
try {
delete(context, ePerson, true);
} catch (AuthorizeException ex) {
- log.error("This AuthorizeException: " + ex + " occured while deleting Eperson with the ID: " +
+ log.error("This AuthorizeException: " + ex + " occurred while deleting Eperson with the ID: " +
ePerson.getID());
throw new AuthorizeException(ex);
} catch (IOException ex) {
- log.error("This IOException: " + ex + " occured while deleting Eperson with the ID: " + ePerson.getID());
+ log.error("This IOException: " + ex + " occurred while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(ex);
} catch (EPersonDeletionException e) {
throw new IllegalStateException(e);
@@ -451,7 +451,7 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
ePerson, task.getStepID());
} catch (WorkflowConfigurationException ex) {
log.error("This WorkflowConfigurationException: " + ex +
- " occured while deleting Eperson with the ID: " + ePerson.getID());
+ " occurred while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(new EPersonDeletionException(Collections
.singletonList(tableName)));
}


@@ -193,7 +193,7 @@ public class Event implements Serializable {
* Contains all identifiers of the DSpaceObject that was changed (added,
* modified, deleted, ...).
*
- * All events gets fired when a context that contains events gets commited.
+ * All events gets fired when a context that contains events gets committed.
* When the delete event is fired, a deleted DSpaceObject is already gone.
* This array contains all identifiers of the object, not only the handle
* as the detail field does. The field may be an empty array if no


@@ -107,7 +107,7 @@ public class EventServiceImpl implements EventService {
try {
return (Dispatcher) dispatcherPool.borrowObject(name);
} catch (Exception e) {
- throw new IllegalStateException("Unable to aquire dispatcher named " + name, e);
+ throw new IllegalStateException("Unable to acquire dispatcher named " + name, e);
}
}
@@ -153,7 +153,7 @@ public class EventServiceImpl implements EventService {
// Prefix of keys in DSpace Configuration
private static final String PROP_PFX = "event.dispatcher";
- // Cache of event dispatchers, keyed by name, for re-use.
+ // Cache of event dispatchers, keyed by name, for reuse.
protected Map<String, String> dispatchers = new HashMap<String, String>();
public DispatcherPoolFactory() {


@@ -27,7 +27,7 @@ public interface EventService {
* if one exists.
*
* @param name dispatcher name
- * @return chached instance of dispatcher
+ * @return cached instance of dispatcher
*/
public Dispatcher getDispatcher(String name);


@@ -26,7 +26,7 @@ import org.dspace.external.provider.AbstractExternalDataProvider;
/**
* This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External
- * data lookups based on ISSN (to match functinoality offered by legacy SHERPASubmitService for policy lookups
+ * data lookups based on ISSN (to match functionality offered by legacy SHERPASubmitService for policy lookups
* at the time of submission).
* This provider is a refactored version of SherpaJournalDataPublisher, rewritten to work with SHERPA v2 API
*


@@ -58,7 +58,7 @@ public interface ExternalDataService {
public List<ExternalDataObject> searchExternalDataObjects(String source, String query, int start, int limit);
/**
- * This method wil return the total amount of results that will be found for the given query in the given source
+ * This method will return the total amount of results that will be found for the given query in the given source
* @param source The source in which the query will happen to return the number of results
* @param query The query to be ran in this source to retrieve the total amount of results
* @return The total amount of results that can be returned for this query in the given source


@@ -37,7 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* Notifies Google Analytics of Bitstream VIEW events. These events are stored in memory and then
- * asynchronously processed by a single seperate thread.
+ * asynchronously processed by a single separate thread.
*
* @author April Herron
* @author Luca Giamminonni
@@ -142,7 +142,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
/**
* Client ID, should uniquely identify the user or device. If we have an
* X-CORRELATION-ID header or a session ID for the user, then lets use it,
- * othwerwise generate a UUID.
+ * otherwise generate a UUID.
*/
private String getClientId(UsageEvent usageEvent) {
if (usageEvent.getRequest().getHeader("X-CORRELATION-ID") != null) {
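The fallback order documented above, sketched with plain servlet calls; only the X-CORRELATION-ID check is visible in this hunk, so the session branch is an assumption:

    HttpServletRequest request = usageEvent.getRequest();
    String clientId;
    if (request.getHeader("X-CORRELATION-ID") != null) {
        clientId = request.getHeader("X-CORRELATION-ID");   // prefer the correlation header
    } else if (request.getSession(false) != null) {
        clientId = request.getSession(false).getId();        // assumed: fall back to the session id
    } else {
        clientId = UUID.randomUUID().toString();              // otherwise generate a UUID
    }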


@@ -312,7 +312,7 @@ public class HandleServiceImpl implements HandleService {
Handle dbHandle = findHandleInternal(context, handle);
if (dbHandle != null) {
// Check if we have to remove the handle from the current handle list
- // or if object is alreday deleted.
+ // or if object is already deleted.
if (dbHandle.getDSpaceObject() != null) {
// Remove the old handle from the current handle list
dbHandle.getDSpaceObject().getHandles().remove(dbHandle);


@@ -57,7 +57,7 @@ public class HdlResolverDTO {
}
/**
- * Returns the splitted String of the resource-path
+ * Returns the split String of the resource-path
*
* @return
*/


@@ -58,12 +58,12 @@ public class HarvestThread extends Thread {
} catch (RuntimeException e) {
log.error("Runtime exception in thread: " + this.toString());
log.error(e.getMessage() + " " + e.getCause());
- hc.setHarvestMessage("Runtime error occured while generating an OAI response");
+ hc.setHarvestMessage("Runtime error occurred while generating an OAI response");
hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
} catch (Exception ex) {
log.error("General exception in thread: " + this.toString());
log.error(ex.getMessage() + " " + ex.getCause());
- hc.setHarvestMessage("Error occured while generating an OAI response");
+ hc.setHarvestMessage("Error occurred while generating an OAI response");
hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
} finally {
try {


@@ -286,8 +286,8 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
try {
doiRow = loadOrCreateDOI(context, dso, doi, filter);
} catch (SQLException ex) {
- log.error("Error in databse connection: {}", ex::getMessage);
- throw new RuntimeException("Error in database conncetion.", ex);
+ log.error("Error in database connection: {}", ex::getMessage);
+ throw new RuntimeException("Error in database connection.", ex);
}
if (DELETED.equals(doiRow.getStatus()) ||
@@ -473,7 +473,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
/**
* Update metadata for a registered object
- * If the DOI for hte item already exists, *always* skip the filter since it should only be used for
+ * If the DOI for the item already exists, *always* skip the filter since it should only be used for
* allowing / disallowing reservation and registration, not metadata updates or deletions
*
* @param context - DSpace context
@@ -525,7 +525,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
/**
* Update metadata for a registered object in the DOI Connector to update the agency records
- * If the DOI for hte item already exists, *always* skip the filter since it should only be used for
+ * If the DOI for the item already exists, *always* skip the filter since it should only be used for
* allowing / disallowing reservation and registration, not metadata updates or deletions
*
* @param context - DSpace context
@@ -611,7 +611,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
try {
doi = getDOIByObject(context, dso);
} catch (SQLException e) {
- log.error("Error while attemping to retrieve information about a DOI for {} with ID {}.",
+ log.error("Error while attempting to retrieve information about a DOI for {} with ID {}.",
contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso), dso.getID());
throw new RuntimeException("Error while attempting to retrieve " +
"information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) +
@@ -709,7 +709,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
doi = getDOIByObject(context, dso);
}
} catch (SQLException ex) {
- log.error("Error while attemping to retrieve information about a DOI for {} with ID {}.",
+ log.error("Error while attempting to retrieve information about a DOI for {} with ID {}.",
contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso),
dso.getID(), ex);
throw new RuntimeException("Error while attempting to retrieve " +


@@ -137,8 +137,9 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implem
loadOrCreateDOI(context, dso, versionedDOI, filter);
} catch (SQLException ex) {
log.error(
- "A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex);
- throw new RuntimeException("A problem with the database connection occured.", ex);
+ "A problem with the database connection occurred while processing DOI " + versionedDOI + ".",
+ ex);
+ throw new RuntimeException("A problem with the database connection occurred.", ex);
}
return versionedDOI;
}
@@ -350,14 +351,14 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implem
changed = true;
}
}
- // reset the metadata if neccessary.
+ // reset the metadata if necessary.
if (changed) {
try {
itemService.clearMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, Item.ANY);
itemService.addMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, newIdentifiers);
itemService.update(c, item);
} catch (SQLException ex) {
- throw new RuntimeException("A problem with the database connection occured.", ex);
+ throw new RuntimeException("A problem with the database connection occurred.", ex);
}
}
}

Some files were not shown because too many files have changed in this diff.