# DSpace Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-nodejs
name: Build

# Run this Build for all pushes / PRs to current branch
on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)
  packages: read  # to fetch private images from GitHub Container Registry (GHCR)

jobs:
  tests:
    runs-on: ubuntu-latest
    env:
      # The ci step will test the dspace-angular code against DSpace REST.
      # Direct that step to use a DSpace REST service that has been started in Docker.
      # NOTE: These settings should be kept in sync with those in [src]/docker/docker-compose-ci.yml
      DSPACE_REST_HOST: 127.0.0.1
      DSPACE_REST_PORT: 8080
      DSPACE_REST_NAMESPACE: '/server'
      DSPACE_REST_SSL: false
      # Spin up UI on 127.0.0.1 to avoid host resolution issues in e2e tests with Node 18+
      DSPACE_UI_HOST: 127.0.0.1
      DSPACE_UI_PORT: 4000
      # Ensure all SSR caching is disabled in the test environment
      DSPACE_CACHE_SERVERSIDE_BOTCACHE_MAX: 0
      DSPACE_CACHE_SERVERSIDE_ANONYMOUSCACHE_MAX: 0
      # Tell Cypress to run e2e tests using the same UI URL
      CYPRESS_BASE_URL: http://127.0.0.1:4000
      # Disable the cookie consent banner in e2e tests to avoid errors caused by elements it hides
      DSPACE_INFO_ENABLECOOKIECONSENTPOPUP: false
      # When CHROME_VERSION is specified, we pin to that specific version of Chrome.
      # Leave it commented out to use the latest release.
      #CHROME_VERSION: "90.0.4430.212-1"
      # Bump Node heap size (we hit OOM in CI after upgrading to Angular 15)
      NODE_OPTIONS: '--max-old-space-size=4096'
      # Project name to use when running "docker compose" prior to e2e tests
      COMPOSE_PROJECT_NAME: 'ci'
      # Docker registry to use for the Docker compose scripts below.
      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
      DOCKER_REGISTRY: ghcr.io
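      # (Assumption, noted here for readers: the compose scripts below are expected to consume this
      #  value via variable substitution, e.g. an image reference of the form
      #  "${DOCKER_REGISTRY}/dspace/dspace:latest"; the exact image names live in
      #  ./docker/docker-compose-ci.yml, not in this workflow.)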
    strategy:
      # Create a matrix of Node versions to test against (in parallel)
      matrix:
        node-version: [18.x, 20.x]
      # Do NOT exit immediately if one matrix job fails
      fail-fast: false
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v4

      # https://github.com/actions/setup-node
      - name: Install Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      # If the CHROME_VERSION env variable is specified above, pin to that version.
      # Otherwise, just install the latest version of Chrome.
      - name: Install Chrome (for e2e tests)
        run: |
          if [[ -z "${CHROME_VERSION}" ]]
          then
            echo "Installing latest stable version"
            sudo apt-get update
            sudo apt-get --only-upgrade install google-chrome-stable -y
          else
            echo "Installing version ${CHROME_VERSION}"
            wget -q "https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROME_VERSION}_amd64.deb"
            sudo dpkg -i "google-chrome-stable_${CHROME_VERSION}_amd64.deb"
          fi
          google-chrome --version

      # https://github.com/actions/cache/blob/main/examples.md#node---npm
      - name: Get NPM cache directory
        id: npm-cache-dir
        run: echo "dir=$(npm config get cache)" >> $GITHUB_OUTPUT
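      # (For reference: on the ubuntu-latest runner the NPM cache directory typically resolves to
      #  ~/.npm; reading it dynamically avoids hard-coding that path.)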
      - name: Cache NPM dependencies
        uses: actions/cache@v4
        with:
          # Cache the entire NPM cache directory (see previous step)
          path: ${{ steps.npm-cache-dir.outputs.dir }}
          # Cache key is a hash of package-lock.json, so any change to package-lock.json invalidates the cache
          key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
          restore-keys: ${{ runner.os }}-npm-

      - name: Install NPM dependencies
        run: npm clean-install

      - name: Build lint plugins
        run: npm run build:lint

      - name: Run lint plugin tests
        run: npm run test:lint:nobuild

      - name: Run lint
        run: npm run lint:nobuild -- --quiet

      - name: Check for circular dependencies
        run: npm run check-circ-deps

      - name: Run build
        run: npm run build:prod

      - name: Run specs (unit tests)
        run: npm run test:headless

      # Upload the code coverage report as an artifact (for one version of Node only),
      # so that it can be shared with the 'codecov' job (see below).
      # NOTE: Angular CLI only supports code coverage for specs. See https://github.com/angular/angular-cli/issues/6286
      - name: Upload code coverage report to Artifact
        uses: actions/upload-artifact@v4
        if: matrix.node-version == '18.x'
        with:
          name: coverage-report-${{ matrix.node-version }}
          path: 'coverage/dspace-angular/lcov.info'
          retention-days: 14

      # Login to our Docker registry, so that we can access private Docker images using "docker compose" below.
      - name: Login to ${{ env.DOCKER_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Using "docker compose", start the backend with the CI configuration
      # and load the assetstore from a cached copy
      - name: Start DSpace REST Backend via Docker (for e2e tests)
        run: |
          docker compose -f ./docker/docker-compose-ci.yml up -d
          docker compose -f ./docker/cli.yml -f ./docker/cli.assetstore.yml run --rm dspace-cli
          docker container ls

      # Run integration tests via Cypress.io
      # https://github.com/cypress-io/github-action
      # (NOTE: to run these e2e tests locally, just use 'ng e2e')
      - name: Run e2e tests (integration tests)
        uses: cypress-io/github-action@v6
        with:
          # Run tests in Chrome, headless mode (default)
          browser: chrome
          # Start the app before running tests (it will be stopped automatically after tests finish)
          start: npm run serve:ssr
          # Wait for both backend & frontend to be available
          # NOTE: We use the 'sites' REST endpoint to also ensure the database is ready
          wait-on: "http://127.0.0.1:8080/server/api/core/sites, http://127.0.0.1:4000"
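          # (For local debugging, a rough equivalent of this readiness check is assumed to be:
          #    curl -sf http://127.0.0.1:8080/server/api/core/sites && curl -sf http://127.0.0.1:4000
          #  i.e. both URLs must respond before Cypress starts the tests.)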
          # Wait for 2 mins max for everything to respond
          wait-on-timeout: 120

      # Cypress always creates a video of all e2e tests (whether they succeeded or failed).
      # Save those in an Artifact.
      - name: Upload e2e test videos to Artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-test-videos-${{ matrix.node-version }}
          path: cypress/videos

      # If e2e tests fail, Cypress creates a screenshot of what happened.
      # Save those in an Artifact.
      - name: Upload e2e test failure screenshots to Artifacts
        uses: actions/upload-artifact@v4
        if: failure()
        with:
          name: e2e-test-screenshots-${{ matrix.node-version }}
          path: cypress/screenshots

      - name: Stop app (in case it stays up after e2e tests)
        run: |
          app_pid=$(lsof -t -i:4000)
          if [[ -n "$app_pid" ]]; then
            echo "App was still up! (PID: $app_pid)"
            kill -9 $app_pid
          fi

      # Start up the app with SSR enabled (run in background)
      - name: Start app in SSR (server-side rendering) mode
        run: |
          nohup npm run serve:ssr &
          printf 'Waiting for app to start'
          until curl --output /dev/null --silent --head --fail http://127.0.0.1:4000/home; do
            printf '.'
            sleep 2
          done
          echo "App started successfully."

      # Get the homepage and verify that the <meta name="title"> tag includes "DSpace".
      # If it does, then SSR is working, as this tag is created by our MetadataService.
      # This step also prints the entire HTML of the homepage for easier debugging if grep fails.
      - name: Verify SSR (server-side rendering) on Homepage
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/home)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep DSpace

      # Get a specific community in our test data and verify that the "<h1>" tag includes "Publications" (the community name).
      # If it does, then SSR is working.
      - name: Verify SSR on a Community page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/communities/0958c910-2037-42a9-81c7-dca80e3892b4)
          echo "$result"
          echo "$result" | grep -oE "<h1 [^>]*>[^><]*</h1>" | grep Publications

      # Get a specific collection in our test data and verify that the "<h1>" tag includes "Articles" (the collection name).
      # If it does, then SSR is working.
      - name: Verify SSR on a Collection page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/collections/282164f5-d325-4740-8dd1-fa4d6d3e7200)
          echo "$result"
          echo "$result" | grep -oE "<h1 [^>]*>[^><]*</h1>" | grep Articles

      # Get a specific publication in our test data and verify that the <meta name="title"> tag includes
      # the title of this publication. If it does, then SSR is working.
      - name: Verify SSR on a Publication page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/publication/6160810f-1e53-40db-81ef-f6621a727398)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "An Economic Model of Mortality Salience"

      # Get a specific person in our test data and verify that the <meta name="title"> tag includes
      # the name of the person. If it does, then SSR is working.
      - name: Verify SSR on a Person page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/person/b1b2c768-bda1-448a-a073-fc541e8b24d9)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "Simmons, Cameron"

      # Get a specific project in our test data and verify that the <meta name="title"> tag includes
      # the name of the project. If it does, then SSR is working.
      - name: Verify SSR on a Project page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/project/46ccb608-a74c-4bf6-bc7a-e29cc7defea9)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "University Research Fellowship"

      # Get a specific orgunit in our test data and verify that the <meta name="title"> tag includes
      # the name of the orgunit. If it does, then SSR is working.
      - name: Verify SSR on an OrgUnit page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/orgunit/9851674d-bd9a-467b-8d84-068deb568ccf)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "Law and Development"

      # Get a specific journal in our test data and verify that the <meta name="title"> tag includes
      # the name of the journal. If it does, then SSR is working.
      - name: Verify SSR on a Journal page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/journal/d4af6c3e-53d0-4757-81eb-566f3b45d63a)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "Environmental & Architectural Phenomenology"

      # Get a specific journal volume in our test data and verify that the <meta name="title"> tag includes
      # the name of the volume. If it does, then SSR is working.
      - name: Verify SSR on a Journal Volume page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/journalvolume/07c6249f-4bf7-494d-9ce3-6ffdb2aed538)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "Environmental & Architectural Phenomenology Volume 28 (2017)"

      # Get a specific journal issue in our test data and verify that the <meta name="title"> tag includes
      # the name of the issue. If it does, then SSR is working.
      - name: Verify SSR on a Journal Issue page
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/entities/journalissue/44c29473-5de2-48fa-b005-e5029aa1a50b)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep "Environmental & Architectural Phenomenology Vol. 28, No. 1"

      - name: Stop running app
        run: kill -9 $(lsof -t -i:4000)

      - name: Shutdown Docker containers
        run: docker compose -f ./docker/docker-compose-ci.yml down

  # Codecov upload is a separate job so that it can be restarted independently of the entire
  # build/test job above. This is necessary because Codecov uploads seem to randomly fail at times.
  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
  codecov:
    # Must run after the 'tests' job above
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      # Download artifacts from the previous 'tests' job
      - name: Download coverage artifacts
        uses: actions/download-artifact@v4

      # Now attempt the upload to Codecov using its action.
      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
      #
      # Retry action: https://github.com/marketplace/actions/retry-action
      # Codecov action: https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: Wandalen/wretry.action@v1.3.0
        with:
          action: codecov/codecov-action@v4
          # Ensure codecov-action throws an error when it fails to upload.
          # This allows us to auto-restart the action if an error is thrown.
          with: |
            fail_ci_if_error: true
            token: ${{ secrets.CODECOV_TOKEN }}
          # Try re-running the action 5 times max
          attempt_limit: 5
          # Wait 30 seconds between attempts
          attempt_delay: 30000