Mirror of https://github.com/DSpace/dspace-angular.git, synced 2025-10-07 01:54:15 +00:00
Merge pull request #2334 from 4Science/fix-bistream-download-cache
Fix for bitstreams not being visible to crawlers when caching is enabled
server.ts (11 additions)
@@ -461,6 +461,8 @@ function saveToCache(req, page: any) {
   const key = getCacheKey(req);
   // Avoid caching "/reload/[random]" paths (these are hard refreshes after logout)
   if (key.startsWith('/reload')) { return; }
+  // Avoid caching not successful responses (status code different from 2XX status)
+  if (hasNotSucceeded(req.res.statusCode)) { return; }

   // Retrieve response headers to save, if any
   const headers = retrieveHeaders(req.res);
@@ -479,6 +481,15 @@ function saveToCache(req, page: any) {
   }
 }

+/**
+ * Check if status code is different from 2XX
+ * @param statusCode
+ */
+function hasNotSucceeded(statusCode) {
+  const rgx = new RegExp(/^20+/);
+  return !rgx.test(statusCode)
+}
+
 function retrieveHeaders(response) {
   const headers = Object.create({});
   if (Array.isArray(environment.cache.serverSide.headers) && environment.cache.serverSide.headers.length > 0) {
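For context, a minimal TypeScript sketch (not part of the commit) of how the new hasNotSucceeded() guard behaves. It reuses the regular expression from the diff above; note that /^20+/ matches status codes beginning with "2" followed by zeros, which covers the common 200/201/204 success codes, so any other status keeps the response out of the server-side cache.

// Sketch only: same check as the hasNotSucceeded() added to server.ts.
function hasNotSucceeded(statusCode: number): boolean {
  // Matches codes starting with "2" followed by one or more zeros (200, 201, 204, ...).
  const rgx = new RegExp(/^20+/);
  return !rgx.test(String(statusCode));
}

hasNotSucceeded(200); // false -> page is saved to the cache
hasNotSucceeded(404); // true  -> page is not cached
hasNotSucceeded(500); // true  -> page is not cached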