# DSpace Continuous Integration/Build via GitHub Actions
# Concepts borrowed from
# https://docs.github.com/en/free-pro-team@latest/actions/guides/building-and-testing-nodejs
name: Build

# Run this Build for all pushes and all pull requests
on: [push, pull_request]

permissions:
  contents: read  # to fetch code (actions/checkout)
  packages: read  # to fetch private images from GitHub Container Registry (GHCR)

jobs:
  tests:
    runs-on: ubuntu-latest
    env:
      # The CI steps below test the dspace-angular code against a DSpace REST API.
      # Direct those steps to use the DSpace REST service started in Docker for the e2e tests.
      # NOTE: These settings should be kept in sync with those in [src]/docker/docker-compose-ci.yml
      DSPACE_REST_HOST: 127.0.0.1
      DSPACE_REST_PORT: 8080
      DSPACE_REST_NAMESPACE: '/server'
      DSPACE_REST_SSL: false
      # Spin up UI on 127.0.0.1 to avoid host resolution issues in e2e tests with Node 18+
      DSPACE_UI_HOST: 127.0.0.1
      DSPACE_UI_PORT: 4000
      # Ensure all SSR caching is disabled in test environment
      DSPACE_CACHE_SERVERSIDE_BOTCACHE_MAX: 0
      DSPACE_CACHE_SERVERSIDE_ANONYMOUSCACHE_MAX: 0
      # Tell Cypress to run e2e tests using the same UI URL
      CYPRESS_BASE_URL: http://127.0.0.1:4000
      # Uncomment CHROME_VERSION to pin e2e tests to a specific version of Chrome.
      # Leave it commented out to use the latest stable release.
      #CHROME_VERSION: "90.0.4430.212-1"
      # Bump Node heap size (OOM in CI after upgrading to Angular 15)
      NODE_OPTIONS: '--max-old-space-size=4096'
      # Project name to use when running "docker compose" prior to e2e tests
      COMPOSE_PROJECT_NAME: 'ci'
      # Docker Registry to use for Docker compose scripts below.
      # We use GitHub's Container Registry to avoid aggressive rate limits at DockerHub.
      DOCKER_REGISTRY: ghcr.io
    strategy:
      # Create a matrix of Node versions to test against (in parallel)
      matrix:
        node-version: [18.x, 20.x]
      # Do NOT exit immediately if one matrix job fails
      fail-fast: false
    # These are the actual CI steps to perform per job
    steps:
      # https://github.com/actions/checkout
      - name: Checkout codebase
        uses: actions/checkout@v4

      # https://github.com/actions/setup-node
      - name: Install Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v4
        with:
          node-version: ${{ matrix.node-version }}

      # If CHROME_VERSION env variable specified above, then pin to that version.
      # Otherwise, just install latest version of Chrome.
      - name: Install Chrome (for e2e tests)
        run: |
          if [[ -z "${CHROME_VERSION}" ]]
          then
            echo "Installing latest stable version"
            sudo apt-get update
            sudo apt-get --only-upgrade install google-chrome-stable -y
          else
            echo "Installing version ${CHROME_VERSION}"
            wget -q "https://dl.google.com/linux/chrome/deb/pool/main/g/google-chrome-stable/google-chrome-stable_${CHROME_VERSION}_amd64.deb"
            sudo dpkg -i "google-chrome-stable_${CHROME_VERSION}_amd64.deb"
          fi
          google-chrome --version

      # https://github.com/actions/cache/blob/main/examples.md#node---yarn
      - name: Get Yarn cache directory
        id: yarn-cache-dir-path
        run: echo "dir=$(yarn cache dir)" >> $GITHUB_OUTPUT
      - name: Cache Yarn dependencies
        uses: actions/cache@v4
        with:
          # Cache entire Yarn cache directory (see previous step)
          path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
          # Cache key is hash of yarn.lock. Therefore changes to yarn.lock will invalidate cache
          key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
          restore-keys: ${{ runner.os }}-yarn-

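      # '--frozen-lockfile' makes the install fail if yarn.lock is out of sync with package.json,
      # ensuring CI always uses the exact dependency versions recorded in the lockfile.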
      - name: Install Yarn dependencies
        run: yarn install --frozen-lockfile

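      # Compile the project's custom lint plugins (local ESLint rules) used by the lint steps below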
      - name: Build lint plugins
        run: yarn run build:lint

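      # The ':nobuild' variants reuse the lint plugins compiled in the previous step instead of rebuilding them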
      - name: Run lint plugin tests
        run: yarn run test:lint:nobuild

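      # '--quiet' limits ESLint output to errors only (warnings are not reported)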
      - name: Run lint
        run: yarn run lint:nobuild --quiet

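      # Fail the build if any circular imports are detected in the source code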
      - name: Check for circular dependencies
        run: yarn run check-circ-deps

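      # Ensure the app compiles successfully in production mode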
      - name: Run build
        run: yarn run build:prod

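      # Run the unit tests in a headless browser and generate the code coverage report used below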
      - name: Run specs (unit tests)
        run: yarn run test:headless

      # Upload the code coverage report as an artifact (for one Node version only),
      # so that it can be shared with the 'codecov' job (see below)
      # NOTE: Angular CLI only supports code coverage for specs. See https://github.com/angular/angular-cli/issues/6286
      - name: Upload code coverage report to Artifact
        uses: actions/upload-artifact@v4
        if: matrix.node-version == '18.x'
        with:
          name: coverage-report-${{ matrix.node-version }}
          path: 'coverage/dspace-angular/lcov.info'
          retention-days: 14

      # Login to our Docker registry, so that we can access private Docker images using "docker compose" below.
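      # Authentication uses the automatic GITHUB_TOKEN, which has the 'packages: read' permission granted above.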
      - name: Login to ${{ env.DOCKER_REGISTRY }}
        uses: docker/login-action@v3
        with:
          registry: ${{ env.DOCKER_REGISTRY }}
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Using "docker compose" start backend using CI configuration
      # and load assetstore from a cached copy
      - name: Start DSpace REST Backend via Docker (for e2e tests)
        run: |
          docker compose -f ./docker/docker-compose-ci.yml up -d
          docker compose -f ./docker/cli.yml -f ./docker/cli.assetstore.yml run --rm dspace-cli
          docker container ls

      # Run integration tests via Cypress.io
      # https://github.com/cypress-io/github-action
      # (NOTE: to run these e2e tests locally, just use 'ng e2e')
      - name: Run e2e tests (integration tests)
        uses: cypress-io/github-action@v6
        with:
          # Run tests in Chrome, headless mode (default)
          browser: chrome
          # Start app before running tests (will be stopped automatically after tests finish)
          start: yarn run serve:ssr
          # Wait for backend & frontend to be available
          # NOTE: We use the 'sites' REST endpoint to also ensure the database is ready
          wait-on: http://127.0.0.1:8080/server/api/core/sites, http://127.0.0.1:4000
          # Wait for 2 mins max for everything to respond
          wait-on-timeout: 120

      # Cypress always creates a video of all e2e tests (whether they succeeded or failed)
      # Save those in an Artifact
      - name: Upload e2e test videos to Artifacts
        uses: actions/upload-artifact@v4
        if: always()
        with:
          name: e2e-test-videos-${{ matrix.node-version }}
          path: cypress/videos

      # If e2e tests fail, Cypress creates screenshots of what happened
      # Save those in an Artifact
      - name: Upload e2e test failure screenshots to Artifacts
        uses: actions/upload-artifact@v4
        if: failure()
        with:
          name: e2e-test-screenshots-${{ matrix.node-version }}
          path: cypress/screenshots

      - name: Stop app (in case it stays up after e2e tests)
        run: |
          # lsof exits non-zero when nothing is listening on the port, so fall back to an
          # empty value to keep this step from failing under the default 'bash -e' shell.
          app_pid=$(lsof -t -i:4000 || true)
          if [[ -n "$app_pid" ]]; then
            echo "App was still up! (PID: $app_pid)"
            kill -9 $app_pid
          fi

      # Start up the app with SSR enabled (run in background)
      - name: Start app in SSR (server-side rendering) mode
        run: |
          nohup yarn run serve:ssr &
          printf 'Waiting for app to start'
          until curl --output /dev/null --silent --head --fail http://127.0.0.1:4000/home; do
            printf '.'
            sleep 2
          done
          echo "App started successfully."

      # Get homepage and verify that the <meta name="title"> tag includes "DSpace".
      # If it does, then SSR is working, as this tag is created by our MetadataService.
      # This step also prints entire HTML of homepage for easier debugging if grep fails.
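      # (A matching tag looks something like: <meta name="title" content="DSpace Angular :: Home">)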
      - name: Verify SSR (server-side rendering)
        run: |
          result=$(wget -O- -q http://127.0.0.1:4000/home)
          echo "$result"
          echo "$result" | grep -oE "<meta name=\"title\" [^>]*>" | grep DSpace

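      # Stop the SSR app started above by killing whatever process is listening on the UI port (4000)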
      - name: Stop running app
        run: kill -9 $(lsof -t -i:4000)

      - name: Shutdown Docker containers
        run: docker compose -f ./docker/docker-compose-ci.yml down

  # Codecov upload is a separate job so that it can be restarted on its own, without re-running the
  # entire build/test job above. This is necessary because Codecov uploads occasionally fail at random.
  # See https://community.codecov.com/t/upload-issues-unable-to-locate-build-via-github-actions-api/3954
  codecov:
    # Must run after 'tests' job above
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

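      # Since no 'name' is specified, download-artifact@v4 downloads all artifacts from the 'tests' job above,
      # each into a directory named after the artifact.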
      # Download artifacts from previous 'tests' job
      - name: Download coverage artifacts
        uses: actions/download-artifact@v4

      # Now attempt upload to Codecov using its action.
      # NOTE: We use a retry action to retry the Codecov upload if it fails the first time.
      #
      # Retry action: https://github.com/marketplace/actions/retry-action
      # Codecov action: https://github.com/codecov/codecov-action
      - name: Upload coverage to Codecov.io
        uses: Wandalen/wretry.action@v1.3.0
        with:
          action: codecov/codecov-action@v4
          # Ensure codecov-action throws an error when it fails to upload
          # This allows us to auto-restart the action if an error is thrown
          with: |
            fail_ci_if_error: true
            token: ${{ secrets.CODECOV_TOKEN }}
          # Attempt the upload up to 5 times
          attempt_limit: 5
          # Wait 30 seconds (30000 ms) between attempts
          attempt_delay: 30000