Fix typos discovered by codespell

This commit is contained in:
Christian Clauss
2024-08-05 22:05:13 +02:00
parent 8e82640cb3
commit dbf33f2113
263 changed files with 443 additions and 442 deletions

View File

@@ -18,7 +18,7 @@ import org.dspace.core.Context;
* Configuration properties: (with examples)
* {@code
* # values for the forever embargo date threshold
* # This threshold date is used in the default access status helper to dermine if an item is
* # This threshold date is used in the default access status helper to determine if an item is
* # restricted or embargoed based on the start date of the primary (or first) file policies.
* # In this case, if the policy start date is inferior to the threshold date, the status will
* # be embargo, else it will be restricted.

View File

@@ -475,7 +475,7 @@ public class DSpaceCSV implements Serializable {
key = key + "." + metadataField.getQualifier();
}
// Add the language if there is one (schema.element.qualifier[langauge])
// Add the language if there is one (schema.element.qualifier[language])
//if ((value.language != null) && (!"".equals(value.language)))
if (value.getLanguage() != null) {
key = key + "[" + value.getLanguage() + "]";

View File

@@ -253,7 +253,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
displayChanges(changes, true);
}
// Finsh off and tidy up
// Finish off and tidy up
c.restoreAuthSystemState();
c.complete();
} catch (Exception e) {
@@ -1653,7 +1653,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
.getLabel();
} else {
// Target item may be archived; check there.
// Add to errors if Realtionship.type cannot be derived
// Add to errors if Relationship.type cannot be derived
Item targetItem = null;
if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
targetItem = itemService.find(c, UUID.fromString(targetUUID));
@@ -1698,7 +1698,7 @@ public class MetadataImport extends DSpaceRunnable<MetadataImportScriptConfigura
validateTypesByTypeByTypeName(c, targetType, originType, typeName, originRow);
} else {
// Origin item may be archived; check there.
// Add to errors if Realtionship.type cannot be derived.
// Add to errors if Relationship.type cannot be derived.
Item originItem = null;
if (itemService.find(c, UUID.fromString(targetUUID)) != null) {
DSpaceCSVLine dSpaceCSVLine = this.csv.getCSVLines()

View File

@@ -725,7 +725,7 @@ public class ItemExportServiceImpl implements ItemExportService {
try {
emailErrorMessage(eperson, e1.getMessage());
} catch (Exception e) {
// wont throw here
// won't throw here
}
throw new IllegalStateException(e1);
} finally {

View File

@@ -69,7 +69,7 @@ public interface ItemExportService {
boolean excludeBitstreams) throws Exception;
/**
* Convenience methot to create export a single Community, Collection, or
* Convenience method to create export a single Community, Collection, or
* Item
*
* @param dso - the dspace object to export
@@ -93,7 +93,7 @@ public interface ItemExportService {
Context context, boolean migrate) throws Exception;
/**
* Convenience methot to create export a single Community, Collection, or
* Convenience method to create export a single Community, Collection, or
* Item
*
* @param dso - the dspace object to export
@@ -156,7 +156,7 @@ public interface ItemExportService {
public String getExportWorkDirectory() throws Exception;
/**
* Used to read the export archived. Inteded for download.
* Used to read the export archived. Intended for download.
*
* @param fileName the name of the file to download
* @param eperson the eperson requesting the download
@@ -233,7 +233,7 @@ public interface ItemExportService {
/**
* Since the archive is created in a new thread we are unable to communicate
* with calling method about success or failure. We accomplis this
* with calling method about success or failure. We accomplish this
* communication with email instead. Send a success email once the export
* archive is complete and ready for download
*
@@ -248,7 +248,7 @@ public interface ItemExportService {
/**
* Since the archive is created in a new thread we are unable to communicate
* with calling method about success or failure. We accomplis this
* with calling method about success or failure. We accomplish this
* communication with email instead. Send an error email if the export
* archive fails
*

View File

@@ -2210,7 +2210,7 @@ public class ItemImportServiceImpl implements ItemImportService, InitializingBea
emailErrorMessage(eperson, exceptionString);
throw new Exception(e.getMessage());
} catch (Exception e2) {
// wont throw here
// won't throw here
}
} finally {
// Make sure the database connection gets closed in all conditions.

View File

@@ -121,7 +121,7 @@ public interface ItemImportService {
/**
* If a batch import is done in a new thread we are unable to communicate
* with calling method about success or failure. We accomplis this
* with calling method about success or failure. We accomplish this
* communication with email instead. Send an error email if the batch
* import fails
*

View File

@@ -217,7 +217,7 @@ public class ItemArchive {
throws SQLException, Exception {
DtoMetadata dtom = getMetadataField("dc.identifier.uri");
if (dtom == null) {
throw new Exception("No dc.identier.uri field found for handle");
throw new Exception("No dc.identifier.uri field found for handle");
}
this.addUndoMetadataField(dtom); //seed the undo list with the uri

View File

@@ -10,7 +10,7 @@ package org.dspace.app.itemupdate;
import java.util.Properties;
/**
* Bitstream filter targetting the THUMBNAIL bundle
* Bitstream filter targeting the THUMBNAIL bundle
*/
public class ThumbnailBitstreamFilter extends BitstreamFilterByBundleName {

View File

@@ -27,7 +27,7 @@ import org.dspace.services.ConfigurationService;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Action to send email to receipients provided in actionSendFilter. The email
* Action to send email to recipients provided in actionSendFilter. The email
* body will be result of templating actionSendFilter.
*/
public class LDNEmailAction implements LDNAction {

View File

@@ -25,7 +25,7 @@ import com.fasterxml.jackson.annotation.JsonPropertyOrder;
* "Offer", "coar-notify:IngestAction"
* "Offer", "coar-notify:ReviewAction"
*
* and their acknownledgements - if any
* and their acknowledgements - if any
*
* @author Francesco Bacchelli (francesco.bacchelli at 4science dot it)
*/

View File

@@ -8,7 +8,7 @@
package org.dspace.app.ldn.model;
/**
* Informations about the Offer and Acknowledgements targeting a specified Item
* Information about the Offer and Acknowledgements targeting a specified Item
*
* @author Francesco Bacchelli (francesco.bacchelli at 4science.com)
*/

View File

@@ -152,7 +152,7 @@ public class LDNMetadataProcessor implements LDNProcessor {
}
/**
* Lookup associated item to the notification context. If UUID in URL, lookup bu
* Lookup associated item to the notification context. If UUID in URL, lookup by
* UUID, else lookup by handle.
*
* @param context current context

View File

@@ -148,7 +148,7 @@ public abstract class ImageMagickThumbnailFilter extends MediaFilter {
// the thumbnail because the CropBox is generally used to define the
// area displayed when a user opens the PDF on a screen, whereas the
// MediaBox is used for print. Not all PDFs set these correctly, so
// we can use ImageMagick's default behavior unless we see an explit
// we can use ImageMagick's default behavior unless we see an explicit
// CropBox. Note: we don't need to do anything special to detect if
// the CropBox is missing or empty because pdfbox will set it to the
// same size as the MediaBox if it doesn't exist. Also note that we

View File

@@ -78,7 +78,7 @@ public class SHERPAService {
@SuppressWarnings("unused")
@PostConstruct
private void init() {
// Get endoint and API key from configuration
// Get endpoint and API key from configuration
endpoint = configurationService.getProperty("sherpa.romeo.url",
"https://v2.sherpa.ac.uk/cgi/retrieve");
apiKey = configurationService.getProperty("sherpa.romeo.apikey");
@@ -156,7 +156,7 @@ public class SHERPAService {
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
if (null != responseBody) {
log.debug("Non-null SHERPA resonse received for query of " + value);
log.debug("Non-null SHERPA response received for query of " + value);
InputStream content = null;
try {
content = responseBody.getContent();
@@ -259,7 +259,7 @@ public class SHERPAService {
// If the response body is valid, pass to SHERPAResponse for parsing as JSON
if (null != responseBody) {
log.debug("Non-null SHERPA resonse received for query of " + value);
log.debug("Non-null SHERPA response received for query of " + value);
InputStream content = null;
try {
content = responseBody.getContent();

View File

@@ -13,7 +13,7 @@ import org.ehcache.event.CacheEvent;
import org.ehcache.event.CacheEventListener;
/**
* This is a EHCache listner responsible for logging sherpa cache events. It is
* This is a EHCache listener responsible for logging sherpa cache events. It is
* bound to the sherpa cache via the dspace/config/ehcache.xml file. We need a
* dedicated Logger for each cache as the CacheEvent doesn't include details
* about where the event occur

View File

@@ -47,7 +47,7 @@ public class SHERPASubmitService {
}
/**
* Setter for SHERPA service, reponsible for actual HTTP API calls
* Setter for SHERPA service, responsible for actual HTTP API calls
* @see "dspace-dspace-addon-sherpa-configuration-services.xml"
* @param sherpaService
*/

View File

@@ -141,7 +141,7 @@ public class GenerateSitemaps {
public static void deleteSitemaps() throws IOException {
File outputDir = new File(configurationService.getProperty("sitemap.dir"));
if (!outputDir.exists() && !outputDir.isDirectory()) {
log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directort");
log.error("Unable to delete sitemaps directory, doesn't exist or isn't a directory");
} else {
FileUtils.deleteDirectory(outputDir);
}

View File

@@ -451,7 +451,7 @@ public class HTMLReport implements Report {
}
/**
* Clean Stirngs for display in HTML
* Clean Strings for display in HTML
*
* @param s The String to clean
* @return The cleaned String

View File

@@ -481,7 +481,7 @@ public class LogAnalyser {
// of the log file are sequential, but can we assume the files are
// provided in a data sequence?
for (i = 0; i < logFiles.length; i++) {
// check to see if this file is a log file agains the global regex
// check to see if this file is a log file against the global regex
Matcher matchRegex = logRegex.matcher(logFiles[i].getName());
if (matchRegex.matches()) {
// if it is a log file, open it up and lets have a look at the

View File

@@ -352,7 +352,7 @@ public class ReportGenerator {
report.setEndDate(endDate);
report.setMainTitle(name, serverName);
// define our standard variables for re-use
// define our standard variables for reuse
// FIXME: we probably don't need these once we've finished re-factoring
Iterator<String> keys = null;
int i = 0;
@@ -518,7 +518,7 @@ public class ReportGenerator {
/**
* a standard stats block preparation method for use when an aggregator
* has to be put out in its entirity. This method will not be able to
* has to be put out in its entirety. This method will not be able to
* deal with complex cases, although it will perform sorting by value and
* translations as per the map file if requested
*
@@ -783,7 +783,7 @@ public class ReportGenerator {
return null;
}
// build the referece
// build the reference
// FIXME: here we have blurred the line between content and presentation
// and it should probably be un-blurred
List<MetadataValue> title = itemService.getMetadata(item, MetadataSchemaEnum.DC.getName(),

View File

@@ -291,7 +291,7 @@ public class StatisticsLoader {
* by the formatter provided, then we return null.
*
* @param thisFile file
* @param thisPattern patter
* @param thisPattern pattern
* @param sdf date format
* @return StatsFile
*/

View File

@@ -22,7 +22,7 @@ public interface SuggestionService {
/** find a {@link SuggestionTarget } by source name and suggestion id */
public SuggestionTarget find(Context context, String source, UUID id);
/** count all suggetion targets by suggestion source */
/** count all suggestion targets by suggestion source */
public long countAll(Context context, String source);
/** find all suggestion targets by source (paged) */

View File

@@ -624,7 +624,7 @@ public class AuthorizeUtil {
throws SQLException {
if (DSpaceServicesFactory.getInstance().getConfigurationService()
.getBooleanProperty("user.registration", true)) {
// This allowSetPassword is currently the only mthod that would return true only when it's
// This allowSetPassword is currently the only method that would return true only when it's
// actually expected to be returning true.
// For example the LDAP canSelfRegister will return true due to auto-register, while that
// does not imply a new user can register explicitly

View File

@@ -62,7 +62,7 @@ public class IndexVersion {
// First argument is the Index path. Determine its version
String indexVersion = getIndexVersion(argv[0]);
// Second argumet is an optional version number to compare to
// Second argument is an optional version number to compare to
String compareToVersion = argv.length > 1 ? argv[1] : null;
// If indexVersion comes back as null, then it is not a valid index directory.

View File

@@ -74,7 +74,7 @@ public class InitializeEntities {
private static void checkHelpEntered(Options options, CommandLine line) {
if (line.hasOption("h")) {
HelpFormatter formatter = new HelpFormatter();
formatter.printHelp("Intialize Entities", options);
formatter.printHelp("Initialize Entities", options);
System.exit(0);
}
}

View File

@@ -36,7 +36,7 @@ public class RegexPatternUtils {
* Computes a pattern starting from a regex definition with flags that
* uses the standard format: <code>/{regex}/{flags}</code> (ECMAScript format).
* This method can transform an ECMAScript regex into a java {@code Pattern} object
* wich can be used to validate strings.
* which can be used to validate strings.
* <br/>
* If regex is null, empty or blank a null {@code Pattern} will be retrieved
* If it's a valid regex, then a non-null {@code Pattern} will be retrieved,

View File

@@ -158,7 +158,7 @@ public class SubmissionConfigReader {
* <li>Hashmap of Collection to Submission definition mappings -
* defines which Submission process a particular collection uses
* <li>Hashmap of all Submission definitions. List of all valid
* Submision Processes by name.
* Submission Processes by name.
* </ul>
*/
private void buildInputs(String fileName) throws SubmissionConfigReaderException {
@@ -358,7 +358,7 @@ public class SubmissionConfigReader {
throws SubmissionConfigReaderException {
// We should already have the step definitions loaded
if (stepDefns != null) {
// retreive step info
// retrieve step info
Map<String, String> stepInfo = stepDefns.get(stepID);
if (stepInfo != null) {

View File

@@ -523,7 +523,7 @@ public class SyndicationFeed {
*/
protected String resolveURL(HttpServletRequest request, DSpaceObject dso) {
// If no object given then just link to the whole repository,
// since no offical handle exists so we have to use local resolution.
// since no official handle exists so we have to use local resolution.
if (dso == null) {
if (baseURL == null) {
if (request == null) {

View File

@@ -179,7 +179,7 @@ public class Util {
* @return the file size as a String
*/
public static String formatFileSize(double in) {
// Work out the size of the file, and format appropriatly
// Work out the size of the file, and format appropriately
// FIXME: When full i18n support is available, use the user's Locale
// rather than the default Locale.
NumberFormat nf = NumberFormat.getNumberInstance(Locale.getDefault());
@@ -238,7 +238,7 @@ public class Util {
} catch (Exception e) {
// at least log this error to make debugging easier
// do not silently return null only.
log.warn("Unable to recoginze UUID from String \""
log.warn("Unable to recognize UUID from String \""
+ val + "\". Will return null.", e);
// Problem with parameter
return null;

View File

@@ -85,7 +85,7 @@ public interface OpenSearchService {
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or the community/collection
* @param results the retreived DSpace objects satisfying search
* @param results the retrieved DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
* @throws IOException if IO error
@@ -105,7 +105,7 @@ public interface OpenSearchService {
* @param start - start result index
* @param pageSize - page size
* @param scope - search scope, null or the community/collection
* @param results the retreived DSpace objects satisfying search
* @param results the retrieved DSpace objects satisfying search
* @param labels labels to apply - format specific
* @return formatted search results
* @throws IOException if IO error

View File

@@ -235,7 +235,7 @@ public class OidcAuthenticationBean implements AuthenticationMethod {
try {
return oidcClient.getAccessToken(code);
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the OIDC access_token", ex);
LOGGER.error("An error occurs retrieving the OIDC access_token", ex);
return null;
}
}
@@ -244,7 +244,7 @@ public class OidcAuthenticationBean implements AuthenticationMethod {
try {
return oidcClient.getUserInfo(accessToken);
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the OIDC user info", ex);
LOGGER.error("An error occurs retrieving the OIDC user info", ex);
return Map.of();
}
}

View File

@@ -282,7 +282,7 @@ public class OrcidAuthenticationBean implements AuthenticationMethod {
try {
return orcidClient.getPerson(token.getAccessToken(), token.getOrcid());
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the ORCID record with id {}",
LOGGER.error("An error occurs retrieving the ORCID record with id {}",
token.getOrcid(), ex);
return null;
}
@@ -320,7 +320,7 @@ public class OrcidAuthenticationBean implements AuthenticationMethod {
try {
return orcidClient.getAccessToken(code);
} catch (Exception ex) {
LOGGER.error("An error occurs retriving the ORCID access_token", ex);
LOGGER.error("An error occurs retrieving the ORCID access_token", ex);
return null;
}
}

View File

@@ -82,7 +82,7 @@ public class PasswordAuthentication
// No conditions set, so must be able to self register
return true;
} else {
// Itterate through all domains
// Iterate through all domains
String check;
email = email.trim().toLowerCase();
for (int i = 0; i < domains.length; i++) {

View File

@@ -287,7 +287,7 @@ public class ShibAuthentication implements AuthenticationMethod {
@Override
public List<Group> getSpecialGroups(Context context, HttpServletRequest request) {
try {
// User has not successfuly authenticated via shibboleth.
// User has not successfully authenticated via shibboleth.
if (request == null ||
context.getCurrentUser() == null) {
return Collections.EMPTY_LIST;
@@ -309,7 +309,7 @@ public class ShibAuthentication implements AuthenticationMethod {
if (ignoreScope && ignoreValue) {
throw new IllegalStateException(
"Both config parameters for ignoring an roll attributes scope and value are turned on, this is " +
"not a permissable configuration. (Note: ignore-scope defaults to true) The configuration " +
"not a permissible configuration. (Note: ignore-scope defaults to true) The configuration " +
"parameters are: 'authentication.shib.role-header.ignore-scope' and 'authentication.shib" +
".role-header.ignore-value'");
}
@@ -391,7 +391,7 @@ public class ShibAuthentication implements AuthenticationMethod {
return new ArrayList<>(groups);
} catch (Throwable t) {
log.error("Unable to validate any sepcial groups this user may belong too because of an exception.", t);
log.error("Unable to validate any special groups this user may belong too because of an exception.", t);
return Collections.EMPTY_LIST;
}
}
@@ -546,7 +546,7 @@ public class ShibAuthentication implements AuthenticationMethod {
/**
* Identify an existing EPerson based upon the shibboleth attributes provided on
* the request object. There are three cases where this can occurr, each as
* the request object. There are three cases where this can occur, each as
* a fallback for the previous method.
*
* 1) NetID from Shibboleth Header (best)
@@ -671,7 +671,7 @@ public class ShibAuthentication implements AuthenticationMethod {
if (!foundNetID && !foundEmail && !foundRemoteUser) {
log.error(
"Shibboleth authentication was not able to find a NetId, Email, or Tomcat Remote user for which to " +
"indentify a user from.");
"identify a user from.");
}
@@ -931,7 +931,7 @@ public class ShibAuthentication implements AuthenticationMethod {
"compatibility mode.");
return SUCCESS;
} else {
// Passsword failure
// Password failure
log.error(
"Shibboleth-based password authentication failed for user " + username + " because a bad password was" +
" supplied.");
@@ -944,7 +944,7 @@ public class ShibAuthentication implements AuthenticationMethod {
/**
* Initialize Shibboleth Authentication.
*
* During initalization the mapping of additional eperson metadata will be loaded from the DSpace.cfg
* During initialization the mapping of additional eperson metadata will be loaded from the DSpace.cfg
* and cached. While loading the metadata mapping this method will check the EPerson object to see
* if it supports the metadata field. If the field is not supported and autocreate is turned on then
* the field will be automatically created.
@@ -985,7 +985,7 @@ public class ShibAuthentication implements AuthenticationMethod {
String[] metadataParts = metadataString.split("=>");
if (metadataParts.length != 2) {
log.error("Unable to parse metadat mapping string: '" + metadataString + "'");
log.error("Unable to parse metadata mapping string: '" + metadataString + "'");
continue;
}

View File

@@ -137,7 +137,7 @@ public class PolicySet {
* otherwise add to existing policies
* @param clearOnly if non-null, only process bitstreams whose names contain filter
* @param name policy name
* @param description policy descrption
* @param description policy description
* @param startDate policy start date
* @param endDate policy end date
* @throws SQLException if database error

View File

@@ -17,7 +17,7 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation of {@link PasswordValidatorService} that verifies if the given
* passowrd matches the configured pattern.
* password matches the configured pattern.
*
* @author Luca Giamminonni (luca.giamminonni at 4science.it)
*/

View File

@@ -133,7 +133,7 @@ public interface AuthorizeService {
public boolean authorizeActionBoolean(Context c, DSpaceObject o, int a, boolean useInheritance) throws SQLException;
/**
* same authorize with a specif eperson (not the current user), returns boolean for those who don't want to deal
* same authorize with a specific eperson (not the current user), returns boolean for those who don't want to deal
* with
* catching exceptions.
*
@@ -235,7 +235,7 @@ public interface AuthorizeService {
* @param o DSpaceObject to add policy to
* @param actionID ID of action from <code>org.dspace.core.Constants</code>
* @param e eperson who can perform the action
* @param type policy type, deafult types are declared in the ResourcePolicy class
* @param type policy type, default types are declared in the ResourcePolicy class
* @throws SQLException if database error
* @throws AuthorizeException if current user in context is not authorized to add policies
*/
@@ -261,7 +261,7 @@ public interface AuthorizeService {
* @param o object to add policy for
* @param actionID ID of action from <code>org.dspace.core.Constants</code>
* @param g group to add policy for
* @param type policy type, deafult types are declared in the ResourcePolicy class
* @param type policy type, default types are declared in the ResourcePolicy class
* @throws SQLException if there's a database problem
* @throws AuthorizeException if the current user is not authorized to add this policy
*/

View File

@@ -195,7 +195,7 @@ public class BrowseIndex {
}
}
// for backward compatability we ignore the keywords
// for backward compatibility we ignore the keywords
// single and full here
if (!sortName.equalsIgnoreCase("single")
&& !sortName.equalsIgnoreCase("full")
@@ -597,7 +597,7 @@ public class BrowseIndex {
/**
* Is the browse index of display type single?
*
* @return true if singe, false if not
* @return true if single, false if not
*/
public boolean isMetadataIndex() {
return displayType != null && displayType.startsWith("metadata");

View File

@@ -100,7 +100,7 @@ public class CrossLinks {
// Resolve wildcards properly, eg. dc.subject.other matches a configuration for dc.subject.*
for (String key : links.keySet()) {
if (null != key && key.endsWith(".*")) {
// A substring of length-1, also substracting the wildcard should work as a "startsWith"
// A substring of length-1, also subtracting the wildcard should work as a "startsWith"
// check for the field eg. dc.subject.* -> dc.subject is the start of dc.subject.other
if (null != metadata && metadata.startsWith(key.substring(0, key.length() - 1 - ".*".length()))) {
return links.get(key);

View File

@@ -60,7 +60,7 @@ public class LimitedCountDispatcher implements BitstreamDispatcher {
}
/**
* Retreives the next bitstream to be checked.
* Retrieves the next bitstream to be checked.
*
* @return the bitstream
* @throws SQLException if database error

View File

@@ -131,7 +131,7 @@ public class Bundle extends DSpaceObject implements DSpaceObjectLegacySupport {
/**
* Get a copy of the bitstream list of this bundle
* Note that this is a copy and if you wish to manipulate the bistream list, you should use
* Note that this is a copy and if you wish to manipulate the bitstream list, you should use
* {@ref Bundle.addBitstream}, {@ref Bundle.removeBitstream} or {@ref Bundle.clearBitstreams}
*
* @return the bitstreams

View File

@@ -155,7 +155,7 @@ public class Collection extends CacheableDSpaceObject implements DSpaceObjectLeg
/**
* Set the default group of submitters
*
* Package protected in order to preven unauthorized calls to this method
* Package protected in order to prevent unauthorized calls to this method
*
* @param submitters the group of submitters
*/

View File

@@ -323,7 +323,7 @@ public abstract class DSpaceObjectServiceImpl<T extends DSpaceObject> implements
}
}
metadataValue.setValue(String.valueOf(dcvalue));
//An update here isn't needed, this is persited upon the merge of the owning object
//An update here isn't needed, this is persisted upon the merge of the owning object
// metadataValueService.update(context, metadataValue);
dso.addDetails(metadataField.toString());
}

View File

@@ -1005,7 +1005,7 @@ public class ItemServiceImpl extends DSpaceObjectServiceImpl<Item> implements It
throws SQLException, AuthorizeException {
// Bundles should inherit from DEFAULT_ITEM_READ so that if the item is readable, the files
// can be listed (even if they are themselves not readable as per DEFAULT_BITSTREAM_READ or other
// policies or embargos applied
// policies or embargoes applied
List<ResourcePolicy> defaultCollectionBundlePolicies = authorizeService
.getPoliciesActionFilter(context, collection, Constants.DEFAULT_ITEM_READ);
// Bitstreams should inherit from DEFAULT_BITSTREAM_READ

View File

@@ -52,7 +52,7 @@ public class MetadataValueServiceImpl implements MetadataValueService {
metadataValue.setMetadataField(metadataField);
metadataValue.setDSpaceObject(dso);
dso.addMetadata(metadataValue);
//An update here isn't needed, this is persited upon the merge of the owning object
//An update here isn't needed, this is persisted upon the merge of the owning object
// metadataValueDAO.save(context, metadataValue);
metadataValue = metadataValueDAO.create(context, metadataValue);
log.info(LogHelper.getHeader(context, "add_metadatavalue",

View File

@@ -98,7 +98,7 @@ public class RelationshipType implements ReloadableEntity<Integer> {
private Integer rightMinCardinality;
/**
* Tha maximum amount of relations for the rightItem that can be present at all times
* The maximum amount of relations for the rightItem that can be present at all times
*/
@Column(name = "right_max_cardinality")
private Integer rightMaxCardinality;

View File

@@ -77,7 +77,7 @@ public class Choice {
/**
* Constructor for common need of Hierarchical authorities that want to
* explicitely set the selectable flag
* explicitly set the selectable flag
*
* @param authority the authority key
* @param value the text value to store in the metadata

View File

@@ -131,7 +131,7 @@ public interface ChoiceAuthority extends NameAwarePlugin {
* Build the preferred choice associated with the authKey. The default
* implementation delegate the creato to the {@link #getLabel(String, String)}
* {@link #getValue(String, String)} and {@link #getExtra(String, String)}
* methods but can be directly overriden for better efficiency or special
* methods but can be directly overridden for better efficiency or special
* scenario
*
* @param authKey authority key known to this authority.

View File

@@ -42,7 +42,7 @@ import org.springframework.beans.factory.annotation.Autowired;
* Broker for ChoiceAuthority plugins, and for other information configured
* about the choice aspect of authority control for a metadata field.
*
* Configuration keys, per metadata field (e.g. "dc.contributer.author")
* Configuration keys, per metadata field (e.g. "dc.contributor.author")
*
* {@code
* # names the ChoiceAuthority plugin called for this field

View File

@@ -30,7 +30,7 @@ import org.dspace.core.SelfNamedPlugin;
* configurable submission.
*
* Configuration:
* This MUST be configured aas a self-named plugin, e.g.:
* This MUST be configured as a self-named plugin, e.g.:
* {@code
* plugin.selfnamed.org.dspace.content.authority.ChoiceAuthority = \
* org.dspace.content.authority.DCInputAuthority

View File

@@ -76,7 +76,7 @@ public class SolrAuthority implements ChoiceAuthority {
Integer.parseInt(locale);
locale = null;
} catch (NumberFormatException e) {
//Everything is allright
//Everything is alright
}
if (locale != null && !"".equals(locale)) {
localSearchField = searchField + "_" + locale;

View File

@@ -22,7 +22,7 @@ import org.dspace.content.authority.DSpaceControlledVocabularyIndex;
* Broker for ChoiceAuthority plugins, and for other information configured
* about the choice aspect of authority control for a metadata field.
*
* Configuration keys, per metadata field (e.g. "dc.contributer.author")
* Configuration keys, per metadata field (e.g. "dc.contributor.author")
* {@code
* # names the ChoiceAuthority plugin called for this field
* choices.plugin.<FIELD> = name-of-plugin

View File

@@ -14,7 +14,7 @@ import org.dspace.content.MetadataField;
/**
* Broker for metadata authority settings configured for each metadata field.
*
* Configuration keys, per metadata field (e.g. "dc.contributer.author")
* Configuration keys, per metadata field (e.g. "dc.contributor.author")
*
* # is field authority controlled (i.e. store authority, confidence values)?
* {@code authority.controlled.<FIELD> = true}

View File

@@ -36,7 +36,7 @@ import org.jdom2.Namespace;
*/
public class DIMDisseminationCrosswalk
implements DisseminationCrosswalk {
// Non-existant XSD schema
// Non-existent XSD schema
public static final String DIM_XSD = "null";
// Namespaces

View File

@@ -37,7 +37,7 @@ public interface IngestionCrosswalk {
* internal representations. This version accepts metadata as a
* <code>List</code> of JDOM XML elements. It interprets the
* contents of each element and adds the appropriate values to the
* DSpace Object's internal metadata represenation.
* DSpace Object's internal metadata representation.
* <p>
* Note that this method may be called several times for the same target
* Item, if the metadata comes as several lists of elements, so it should

View File

@@ -202,7 +202,7 @@ public class MODSDisseminationCrosswalk extends SelfNamedPlugin
* e.g. dc.contributor.author
*
* 2. XML fragment is prototype of metadata element, with empty or "%s"
* placeholders for value(s). NOTE: Leave the %s's in becaue
* placeholders for value(s). NOTE: Leave the %s's in because
* it's much easier then to see if something is broken.
*
* 3. XPath expression listing point(s) in the above XML where

View File

@@ -173,7 +173,7 @@ public class OREIngestionCrosswalk
try {
// Make sure the url string escapes all the oddball characters
String processedURL = encodeForURL(href);
// Generate a requeset for the aggregated resource
// Generate a request for the aggregated resource
ARurl = new URL(processedURL);
in = ARurl.openStream();
} catch (FileNotFoundException fe) {

View File

@@ -113,7 +113,7 @@ public class QDCCrosswalk extends SelfNamedPlugin
private static final Namespace DCTERMS_NS =
Namespace.getNamespace("dcterms", "http://purl.org/dc/terms/");
// sentinal: done init?
// sentinel: done init?
private boolean inited = false;
// my plugin name

View File

@@ -47,12 +47,12 @@ public class SubscriptionDsoMetadataForEmailCompose implements StreamDisseminati
Item item = (Item) dso;
PrintStream printStream = new PrintStream(out);
for (String actualMetadata : metadata) {
String[] splitted = actualMetadata.split("\\.");
String[] split = actualMetadata.split("\\.");
String qualifier = null;
if (splitted.length == 3) {
qualifier = splitted[2];
if (split.length == 3) {
qualifier = split[2];
}
var metadataValue = itemService.getMetadataFirstValue(item, splitted[0], splitted[1], qualifier, ANY);
var metadataValue = itemService.getMetadataFirstValue(item, split[0], split[1], qualifier, ANY);
printStream.print(metadataValue + " ");
}
String itemURL = HandleServiceFactory.getInstance()

View File

@@ -52,7 +52,7 @@ public class MetadataValueDTO {
* @param schema The schema to be assigned to this MetadataValueDTO object
* @param element The element to be assigned to this MetadataValueDTO object
* @param qualifier The qualifier to be assigned to this MetadataValueDTO object
* @param language The language to be assigend to this MetadataValueDTO object
* @param language The language to be assigned to this MetadataValueDTO object
* @param value The value to be assigned to this MetadataValueDTO object
* @param authority The authority to be assigned to this MetadataValueDTO object
* @param confidence The confidence to be assigned to this MetadataValueDTO object
@@ -73,7 +73,7 @@ public class MetadataValueDTO {
* @param schema The schema to be assigned to this MetadataValueDTO object
* @param element The element to be assigned to this MetadataValueDTO object
* @param qualifier The qualifier to be assigned to this MetadataValueDTO object
* @param language The language to be assigend to this MetadataValueDTO object
* @param language The language to be assigned to this MetadataValueDTO object
* @param value The value to be assigned to this MetadataValueDTO object
*/
public MetadataValueDTO(String schema, String element, String qualifier, String language, String value) {

View File

@@ -74,7 +74,7 @@ public class FilterUtils {
Map<Class<? extends Identifier>, Filter> filters = new HashMap<>();
// Put DOI 'can we create DOI on install / workspace?' filter
Filter filter = FilterUtils.getFilterFromConfiguration("identifiers.submission.filter." + configurationSuffix);
// A null filter should be handled safely by the identifier provier (default, or "always true")
// A null filter should be handled safely by the identifier provider (default, or "always true")
filters.put(DOI.class, filter);
// This won't have an affect until handle providers implement filtering, but is an example of
// how the filters can be used for other types

View File

@@ -139,7 +139,7 @@ public class DSpaceAIPIngester
}
}
// MODS is acceptable otehrwise..
// MODS is acceptable otherwise..
if (found == -1) {
for (int i = 0; i < dmds.length; ++i) {
//NOTE: METS standard actually says this should be MODS (all uppercase). But,

View File

@@ -360,7 +360,7 @@ public class PDFPackager
* CreationDate -> date.created
* ModDate -> date.created
* Creator -> description.provenance (application that created orig)
* Producer -> description.provenance (convertor to pdf)
* Producer -> description.provenance (converter to pdf)
* Subject -> description.abstract
* Keywords -> subject.other
* date is java.util.Calendar

View File

@@ -38,7 +38,7 @@ import org.dspace.core.Context;
* format output by <code>disseminate</code> may be affected by
* parameters, it is given to the <code>getMIMEType</code> method as well.
* The parameters list is a generalized mechanism to pass parameters
* from the package requestor to the packager, since different packagers will
* from the package requester to the packager, since different packagers will
* understand different sets of parameters.
*
* @author Larry Stone

View File

@@ -34,7 +34,7 @@ import org.dspace.workflow.WorkflowException;
* The ingest methods are also given an attribute-value
* list of "parameters" which may modify their actions.
* The parameters list is a generalized mechanism to pass parameters
* from the requestor to the packager, since different packagers will
* from the requester to the packager, since different packagers will
* understand different sets of parameters.
*
* @author Larry Stone

View File

@@ -181,7 +181,7 @@ public interface CommunityService extends DSpaceObjectService<Community>, DSpace
/**
* Add an exisiting collection to the community
* Add an existing collection to the community
*
* @param context context
* @param community community

View File

@@ -20,7 +20,7 @@ import org.dspace.core.Context;
public interface FeedbackService {
/**
* This method sends the feeback email to the recipient passed as parameter
* This method sends the feedback email to the recipient passed as parameter
* @param context current DSpace application context
* @param request current servlet request
* @param recipientEmail recipient to which mail is sent

View File

@@ -282,7 +282,7 @@ public interface RelationshipService extends DSpaceCRUDService<Relationship> {
List<Relationship> findByRelationshipType(Context context, RelationshipType relationshipType) throws SQLException;
/**
* This method returns a list of Relationship objets for which the relationshipType property is equal to the given
* This method returns a list of Relationship objects for which the relationshipType property is equal to the given
* RelationshipType object
* NOTE: tilted relationships are NEVER excluded when fetching one relationship type
* @param context The relevant DSpace context

View File

@@ -62,16 +62,16 @@ public class Concatenate implements VirtualMetadataConfiguration {
}
/**
* Generic getter for the seperator
* @return the seperator to be used by this bean
* Generic getter for the separator
* @return the separator to be used by this bean
*/
public String getSeparator() {
return separator;
}
/**
* Generic setter for the seperator property
* @param separator The String seperator value to which this seperator value will be set to
* Generic setter for the separator property
* @param separator The String separator value to which this separator value will be set to
*/
public void setSeparator(String separator) {
this.separator = separator;

View File

@@ -346,7 +346,7 @@ public abstract class AbstractHibernateDAO<T> implements GenericDAO<T> {
/**
* This method will return the count of items for this query as an integer
* This query needs to already be in a formate that'll return one record that contains the amount
* This query needs to already be in a format that'll return one record that contains the amount
*
* @param query
* The query for which the amount of results will be returned.

View File

@@ -713,7 +713,7 @@ public class Context implements AutoCloseable {
public void switchContextUser(EPerson newUser) {
if (currentUserPreviousState != null) {
throw new IllegalStateException(
"A previous user is already set, you can only switch back and foreward one time");
"A previous user is already set, you can only switch back and forward one time");
}
currentUserPreviousState = currentUser;

View File

@@ -34,6 +34,6 @@ public final class HibernateProxyHelper {
}
private HibernateProxyHelper() {
//cant instantiate
//can't instantiate
}
}

View File

@@ -50,7 +50,7 @@ public class LogHelper {
StringBuilder result = new StringBuilder();
// Escape everthing but the extra context info because for some crazy reason two fields
// Escape everything but the extra context info because for some crazy reason two fields
// are generated inside this entry one for the session id, and another for the ip
// address. Everything else should be escaped.
result.append(escapeLogField(email)).append(":").append(contextExtraInfo).append(":")

View File

@@ -95,7 +95,7 @@ public final class Utils {
private static final SimpleDateFormat outFmtSecond
= new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ssZ");
// output format with millsecond precision
// output format with millisecond precision
private static final SimpleDateFormat outFmtMillisec
= new SimpleDateFormat("yyyy'-'MM'-'dd'T'HH':'mm':'ss.SSSZ");

View File

@@ -32,7 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* Implementation class for {@link CorrectionType}
* that will withdrawn target item if it archived and wasn't withdrawn alredy.
* that will withdrawn target item if it archived and wasn't withdrawn already.
*
* @author Mykhaylo Boychuk (mykhaylo.boychuk at 4science.com)
*/

View File

@@ -145,7 +145,7 @@ public class BasicLinkChecker extends AbstractCurationTask {
}
/**
* Internal utitity method to get a description of the handle
* Internal utility method to get a description of the handle
*
* @param item The item to get a description of
* @return The handle, or in workflow

View File

@@ -114,7 +114,7 @@ public class CitationPage extends AbstractCurationTask {
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), dBundle);
} catch (AuthorizeException e) {
log.error("User not authroized to create bundle on item \"{}\": {}",
log.error("User not authorized to create bundle on item \"{}\": {}",
item::getName, e::getMessage);
return;
}
@@ -144,7 +144,7 @@ public class CitationPage extends AbstractCurationTask {
// don't inherit now otherwise they will be copied over the moved bitstreams
resourcePolicyService.removeAllPolicies(Curator.curationContext(), pBundle);
} catch (AuthorizeException e) {
log.error("User not authroized to create bundle on item \""
log.error("User not authorized to create bundle on item \""
+ item.getName() + "\": " + e.getMessage());
}
bundles = itemService.getBundles(item, "ORIGINAL");
@@ -173,7 +173,7 @@ public class CitationPage extends AbstractCurationTask {
InputStream citedInputStream =
new ByteArrayInputStream(
citationDocument.makeCitedDocument(Curator.curationContext(), bitstream).getLeft());
//Add the cited document to the approiate bundle
//Add the cited document to the appropriate bundle
this.addCitedPageToItem(citedInputStream, bundle, pBundle,
dBundle, item, bitstream);
// now set the policies of the preservation and display bundle

View File

@@ -16,7 +16,7 @@ import org.dspace.core.Context;
* ScriptedTask describes a rather generic ability to perform an operation
* upon a DSpace object. It's semantics are identical to the CurationTask interface,
* but is designed to be implemented in scripting languages, rather than
* Java. For this reason, the 'perform' methods are renamed to accomodate
* Java. For this reason, the 'perform' methods are renamed to accommodate
* languages (like Ruby) that lack method overloading.
*
* @author richardrodgers

View File

@@ -94,7 +94,7 @@ public enum IndexClientOptions {
options.addOption("b", "build", false, "(re)build index, wiping out current one if it exists");
options.addOption("s", "spellchecker", false, "Rebuild the spellchecker, can be combined with -b and -f.");
options.addOption("f", "force", false,
"if updating existing index, force each handle to be reindexed even if uptodate");
"if updating existing index, force each handle to be reindexed even if up-to-date");
options.addOption("h", "help", false, "print this help message");
return options;
}

View File

@@ -79,7 +79,7 @@ public interface IndexingService {
/**
* Atomically update the index of a single field for an object
* @param context The DSpace context
* @param uniqueIndexId The unqiue index ID of the object to update the index for
* @param uniqueIndexId The unique index ID of the object to update the index for
* @param field The field to update
* @param fieldModifier The modifiers for the field to update. More information on how to atomically update a solr
* field using a field modifier can be found here: https://yonik.com/solr/atomic-updates/

View File

@@ -88,7 +88,7 @@ public class SearchUtils {
/**
* Retrieves the Discovery Configuration with a null prefix for a DSpace object.
* @param context
* the dabase context
* the database context
* @param dso
* the DSpace object
* @return the Discovery Configuration for the specified DSpace object

View File

@@ -1230,7 +1230,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
} catch (IOException | SQLException | SolrServerException e) {
// Any exception that we get, ignore it.
// We do NOT want any crashes to be shown to the user
log.error(LogHelper.getHeader(context, "Error while quering solr", "Query: " + query), e);
log.error(LogHelper.getHeader(context, "Error while querying solr", "Query: " + query), e);
return new ArrayList<>(0);
}
}
@@ -1359,7 +1359,7 @@ public class SolrServiceImpl implements SearchService, IndexingService {
* Gets the solr field that contains the facet value split on each word break to the end, so can be searched
* on each word in the value, see {@link org.dspace.discovery.indexobject.ItemIndexFactoryImpl
* #saveFacetPrefixParts(SolrInputDocument, DiscoverySearchFilter, String, String)}
* Ony applicable to facets of type {@link DiscoveryConfigurationParameters.TYPE_TEXT}, otherwise uses the regular
* Only applicable to facets of type {@link DiscoveryConfigurationParameters.TYPE_TEXT}, otherwise uses the regular
* facet filter field
*/
protected String transformPrefixFacetField(DiscoverFacetField facetFieldConfig, String field,

View File

@@ -79,7 +79,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
// Faceting for metadata browsing. It is different than search facet
// because if there are authority with variants support we want all the
// variants to go in the facet... they are sorted by count so just the
// prefered label is relevant
// preferred label is relevant
for (BrowseIndex bi : bis) {
log.debug("Indexing for item " + item.getID() + ", for index: "
+ bi.getTableName());
@@ -280,7 +280,7 @@ public class SolrServiceMetadataBrowseIndexingPlugin implements SolrServiceIndex
}
}
// Add sorting options as configurated for the browse system
// Add sorting options as configured for the browse system
try {
for (SortOption so : SortOption.getSortOptions()) {
List<MetadataValue> dcvalue = itemService.getMetadataByMetadataString(item, so.getMetadata());

View File

@@ -57,7 +57,7 @@ public class DiscoverySearchFilter {
* For the DiscoverySearchFilter only the TYPE_TEXT, TYPE_DATE and TYPE_HIERARCHICAL are allowed
*
* @param type The type for this DiscoverySearchFilter
* @throws DiscoveryConfigurationException If none of the types match, this error will be thrown indiciating this
* @throws DiscoveryConfigurationException If none of the types match, this error will be thrown indicating this
*/
public void setType(String type) throws DiscoveryConfigurationException {
if (type.equalsIgnoreCase(DiscoveryConfigurationParameters.TYPE_TEXT)) {

View File

@@ -174,7 +174,7 @@ public class EPerson extends CacheableDSpaceObject implements DSpaceObjectLegacy
/**
* Set the EPerson's language. Value is expected to be a Unix/POSIX
* Locale specification of the form {language} or {language}_{territory},
* e.g. "en", "en_US", "pt_BR" (the latter is Brazilian Portugese).
* e.g. "en", "en_US", "pt_BR" (the latter is Brazilian Portuguese).
*
* @param context The relevant DSpace Context.
* @param language language code

View File

@@ -47,7 +47,7 @@ public class EPersonConsumer implements Consumer {
= DSpaceServicesFactory.getInstance().getConfigurationService();
/**
* Initalise the consumer
* Initialise the consumer
*
* @throws Exception if error
*/

View File

@@ -343,11 +343,11 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
try {
delete(context, ePerson, true);
} catch (AuthorizeException ex) {
log.error("This AuthorizeException: " + ex + " occured while deleting Eperson with the ID: " +
log.error("This AuthorizeException: " + ex + " occurred while deleting Eperson with the ID: " +
ePerson.getID());
throw new AuthorizeException(ex);
} catch (IOException ex) {
log.error("This IOException: " + ex + " occured while deleting Eperson with the ID: " + ePerson.getID());
log.error("This IOException: " + ex + " occurred while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(ex);
} catch (EPersonDeletionException e) {
throw new IllegalStateException(e);
@@ -451,7 +451,7 @@ public class EPersonServiceImpl extends DSpaceObjectServiceImpl<EPerson> impleme
ePerson, task.getStepID());
} catch (WorkflowConfigurationException ex) {
log.error("This WorkflowConfigurationException: " + ex +
" occured while deleting Eperson with the ID: " + ePerson.getID());
" occurred while deleting Eperson with the ID: " + ePerson.getID());
throw new AuthorizeException(new EPersonDeletionException(Collections
.singletonList(tableName)));
}

View File

@@ -193,7 +193,7 @@ public class Event implements Serializable {
* Contains all identifiers of the DSpaceObject that was changed (added,
* modified, deleted, ...).
*
* All events gets fired when a context that contains events gets commited.
* All events gets fired when a context that contains events gets committed.
* When the delete event is fired, a deleted DSpaceObject is already gone.
* This array contains all identifiers of the object, not only the handle
* as the detail field does. The field may be an empty array if no

View File

@@ -107,7 +107,7 @@ public class EventServiceImpl implements EventService {
try {
return (Dispatcher) dispatcherPool.borrowObject(name);
} catch (Exception e) {
throw new IllegalStateException("Unable to aquire dispatcher named " + name, e);
throw new IllegalStateException("Unable to acquire dispatcher named " + name, e);
}
}
@@ -153,7 +153,7 @@ public class EventServiceImpl implements EventService {
// Prefix of keys in DSpace Configuration
private static final String PROP_PFX = "event.dispatcher";
// Cache of event dispatchers, keyed by name, for re-use.
// Cache of event dispatchers, keyed by name, for reuse.
protected Map<String, String> dispatchers = new HashMap<String, String>();
public DispatcherPoolFactory() {

View File

@@ -27,7 +27,7 @@ public interface EventService {
* if one exists.
*
* @param name dispatcher name
* @return chached instance of dispatcher
* @return cached instance of dispatcher
*/
public Dispatcher getDispatcher(String name);

View File

@@ -26,7 +26,7 @@ import org.dspace.external.provider.AbstractExternalDataProvider;
/**
* This class is the implementation of the ExternalDataProvider interface that will deal with SherpaJournal External
* data lookups based on ISSN (to match functinoality offered by legacy SHERPASubmitService for policy lookups
* data lookups based on ISSN (to match functionality offered by legacy SHERPASubmitService for policy lookups
* at the time of submission).
* This provider is a refactored version of SherpaJournalDataPublisher, rewritten to work with SHERPA v2 API
*

View File

@@ -58,7 +58,7 @@ public interface ExternalDataService {
public List<ExternalDataObject> searchExternalDataObjects(String source, String query, int start, int limit);
/**
* This method wil return the total amount of results that will be found for the given query in the given source
* This method will return the total amount of results that will be found for the given query in the given source
* @param source The source in which the query will happen to return the number of results
* @param query The query to be ran in this source to retrieve the total amount of results
* @return The total amount of results that can be returned for this query in the given source

View File

@@ -37,7 +37,7 @@ import org.springframework.beans.factory.annotation.Autowired;
/**
* Notifies Google Analytics of Bitstream VIEW events. These events are stored in memory and then
* asynchronously processed by a single seperate thread.
* asynchronously processed by a single separate thread.
*
* @author April Herron
* @author Luca Giamminonni
@@ -142,7 +142,7 @@ public class GoogleAsyncEventListener extends AbstractUsageEventListener {
/**
* Client ID, should uniquely identify the user or device. If we have an
* X-CORRELATION-ID header or a session ID for the user, then lets use it,
* othwerwise generate a UUID.
* otherwise generate a UUID.
*/
private String getClientId(UsageEvent usageEvent) {
if (usageEvent.getRequest().getHeader("X-CORRELATION-ID") != null) {

View File

@@ -312,7 +312,7 @@ public class HandleServiceImpl implements HandleService {
Handle dbHandle = findHandleInternal(context, handle);
if (dbHandle != null) {
// Check if we have to remove the handle from the current handle list
// or if object is alreday deleted.
// or if object is already deleted.
if (dbHandle.getDSpaceObject() != null) {
// Remove the old handle from the current handle list
dbHandle.getDSpaceObject().getHandles().remove(dbHandle);

View File

@@ -57,7 +57,7 @@ public class HdlResolverDTO {
}
/**
* Returns the splitted String of the resource-path
* Returns the split String of the resource-path
*
* @return
*/

View File

@@ -58,12 +58,12 @@ public class HarvestThread extends Thread {
} catch (RuntimeException e) {
log.error("Runtime exception in thread: " + this.toString());
log.error(e.getMessage() + " " + e.getCause());
hc.setHarvestMessage("Runtime error occured while generating an OAI response");
hc.setHarvestMessage("Runtime error occurred while generating an OAI response");
hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
} catch (Exception ex) {
log.error("General exception in thread: " + this.toString());
log.error(ex.getMessage() + " " + ex.getCause());
hc.setHarvestMessage("Error occured while generating an OAI response");
hc.setHarvestMessage("Error occurred while generating an OAI response");
hc.setHarvestStatus(HarvestedCollection.STATUS_UNKNOWN_ERROR);
} finally {
try {

View File

@@ -286,8 +286,8 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
try {
doiRow = loadOrCreateDOI(context, dso, doi, filter);
} catch (SQLException ex) {
log.error("Error in databse connection: {}", ex::getMessage);
throw new RuntimeException("Error in database conncetion.", ex);
log.error("Error in database connection: {}", ex::getMessage);
throw new RuntimeException("Error in database connection.", ex);
}
if (DELETED.equals(doiRow.getStatus()) ||
@@ -473,7 +473,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
/**
* Update metadata for a registered object
* If the DOI for hte item already exists, *always* skip the filter since it should only be used for
* If the DOI for the item already exists, *always* skip the filter since it should only be used for
* allowing / disallowing reservation and registration, not metadata updates or deletions
*
* @param context - DSpace context
@@ -525,7 +525,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
/**
* Update metadata for a registered object in the DOI Connector to update the agency records
* If the DOI for hte item already exists, *always* skip the filter since it should only be used for
* If the DOI for the item already exists, *always* skip the filter since it should only be used for
* allowing / disallowing reservation and registration, not metadata updates or deletions
*
* @param context - DSpace context
@@ -611,7 +611,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
try {
doi = getDOIByObject(context, dso);
} catch (SQLException e) {
log.error("Error while attemping to retrieve information about a DOI for {} with ID {}.",
log.error("Error while attempting to retrieve information about a DOI for {} with ID {}.",
contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso), dso.getID());
throw new RuntimeException("Error while attempting to retrieve " +
"information about a DOI for " + contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso) +
@@ -709,7 +709,7 @@ public class DOIIdentifierProvider extends FilteredIdentifierProvider {
doi = getDOIByObject(context, dso);
}
} catch (SQLException ex) {
log.error("Error while attemping to retrieve information about a DOI for {} with ID {}.",
log.error("Error while attempting to retrieve information about a DOI for {} with ID {}.",
contentServiceFactory.getDSpaceObjectService(dso).getTypeText(dso),
dso.getID(), ex);
throw new RuntimeException("Error while attempting to retrieve " +

View File

@@ -137,8 +137,9 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implem
loadOrCreateDOI(context, dso, versionedDOI, filter);
} catch (SQLException ex) {
log.error(
"A problem with the database connection occurd while processing DOI " + versionedDOI + ".", ex);
throw new RuntimeException("A problem with the database connection occured.", ex);
"A problem with the database connection occurred while processing DOI " + versionedDOI + ".",
ex);
throw new RuntimeException("A problem with the database connection occurred.", ex);
}
return versionedDOI;
}
@@ -350,14 +351,14 @@ public class VersionedDOIIdentifierProvider extends DOIIdentifierProvider implem
changed = true;
}
}
// reset the metadata if neccessary.
// reset the metadata if necessary.
if (changed) {
try {
itemService.clearMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, Item.ANY);
itemService.addMetadata(c, item, MD_SCHEMA, DOI_ELEMENT, DOI_QUALIFIER, null, newIdentifiers);
itemService.update(c, item);
} catch (SQLException ex) {
throw new RuntimeException("A problem with the database connection occured.", ex);
throw new RuntimeException("A problem with the database connection occurred.", ex);
}
}
}

View File

@@ -127,7 +127,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider implem
try {
versionNumber = Integer.valueOf(versionHandleMatcher.group(1));
} catch (NumberFormatException ex) {
throw new IllegalStateException("Cannot detect the interger value of a digit.", ex);
throw new IllegalStateException("Cannot detect the integer value of a digit.", ex);
}
// get history
@@ -148,7 +148,7 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider implem
try {
versionHistoryService.getVersion(context, history, item);
} catch (SQLException ex) {
throw new RuntimeException("Problem with the database connection occurd.", ex);
throw new RuntimeException("Problem with the database connection occurred.", ex);
}
// did we find a version?
@@ -184,11 +184,11 @@ public class VersionedHandleIdentifierProvider extends IdentifierProvider implem
} catch (SQLException | IOException ex) {
throw new RuntimeException("Unable to restore a versioned "
+ "handle as there was a problem in creating a "
+ "neccessary item version: ", ex);
+ "necessary item version: ", ex);
} catch (AuthorizeException ex) {
throw new RuntimeException("Unable to restore a versioned "
+ "handle as the current user was not allowed to "
+ "create a neccessary item version: ", ex);
+ "create a necessary item version: ", ex);
}
return;
}

Some files were not shown because too many files have changed in this diff Show More