diff --git a/.github/actions/build-container/action.yml b/.github/actions/build-container/action.yml index 4c40c294d..34f56a7f3 100644 --- a/.github/actions/build-container/action.yml +++ b/.github/actions/build-container/action.yml @@ -26,13 +26,12 @@ runs: cache-to: type=gha,mode=max context: . file: ${{ inputs.dockerfile }} - push: ${{ ! github.event.pull_request.head.repo.fork }} + push: true sbom: true tags: ${{ steps.metadata.outputs.tags }} labels: ${{ steps.metadata.outputs.labels }} - name: Attest to REF image uses: actions/attest-build-provenance@v2 - if: ${{ ! github.event.pull_request.head.repo.fork }} with: subject-name: ghcr.io/${{ github.repository_owner }}/${{ inputs.container-name }} subject-digest: ${{ steps.push.outputs.digest }} diff --git a/.github/workflows/bump.yaml b/.github/workflows/bump.yaml index 7142704b5..a36616e72 100644 --- a/.github/workflows/bump.yaml +++ b/.github/workflows/bump.yaml @@ -20,7 +20,7 @@ on: jobs: bump_version: name: "Bump version and create changelog" - if: "!startsWith(github.event.head_commit.message, 'bump:')" + if: ${{ !startsWith(github.event.head_commit.message, 'bump:') }}
runs-on: ubuntu-latest env: CI_COMMIT_EMAIL: "ci-runner@climate-ref.invalid" diff --git a/.github/workflows/ci-integration.yaml b/.github/workflows/ci-integration.yaml index ec881c427..409a10601 100644 --- a/.github/workflows/ci-integration.yaml +++ b/.github/workflows/ci-integration.yaml @@ -1,50 +1,50 @@ -name: Integration tests +# name: Integration tests -on: - # Allow manual triggering of this workflow - workflow_dispatch: - # Run on each push to main and tagged version - push: - branches: [main] - tags: ['v*'] - # Runs every day at 2:15am (UTC) (~ midday in AEST) - schedule: - - cron: '2 15 * * *' +# on: +# # Allow manual triggering of this workflow +# workflow_dispatch: +# # Run on each push to main and tagged version +# push: +# branches: [main] +# tags: ['v*'] +# # Runs every day at 2:15am (UTC) (~ midday in AEST) +# schedule: +# - cron: '2 15 * * *' -jobs: - tests-slow: - if: github.repository == 'Climate-REF/climate-ref' - env: - REF_TEST_OUTPUT: "test-outputs" - PYTEST_ADDOPTS: "--slow" - strategy: - fail-fast: false - matrix: - python-version: [ "3.11", "3.13" ] - runs-on: "self-hosted" - defaults: - run: - shell: bash - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - with: - python-version: ${{ matrix.python-version }} - - name: Run tests - run: | - make virtual-environment - make fetch-test-data - uv run ref datasets fetch-data --registry ilamb --symlink - uv run ref datasets fetch-data --registry iomb --symlink - uv run ref datasets fetch-data --registry esmvaltool --symlink - uv run ref providers create-env - uv run pytest packages tests --slow --no-docker -r a -v - # Upload the scratch and executions directories as artifacts - - name: Upload scratch artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: integration-output-${{ matrix.python-version }} - path: ${{ env.REF_TEST_OUTPUT }} - retention-days: 7 +# jobs: +# 
tests-slow: +# if: github.repository == 'Climate-REF/climate-ref' +# env: +# REF_TEST_OUTPUT: "test-outputs" +# PYTEST_ADDOPTS: "--slow" +# strategy: +# fail-fast: false +# matrix: +# python-version: [ "3.11", "3.13" ] +# runs-on: "self-hosted" +# defaults: +# run: +# shell: bash +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - uses: ./.github/actions/setup +# with: +# python-version: ${{ matrix.python-version }} +# - name: Run tests +# run: | +# make virtual-environment +# make fetch-test-data +# uv run ref datasets fetch-data --registry ilamb --symlink +# uv run ref datasets fetch-data --registry iomb --symlink +# uv run ref datasets fetch-data --registry esmvaltool --symlink +# uv run ref providers create-env +# uv run pytest packages tests --slow --no-docker -r a -v +# # Upload the scratch and executions directories as artifacts +# - name: Upload scratch artifacts +# uses: actions/upload-artifact@v4 +# if: always() +# with: +# name: integration-output-${{ matrix.python-version }} +# path: ${{ env.REF_TEST_OUTPUT }} +# retention-days: 7 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 477d93023..6ca774b85 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,144 +1,144 @@ -name: CI +# name: CI -on: - pull_request: - push: - branches: [main] - tags: ['v*'] +# on: +# pull_request: +# push: +# branches: [main] +# tags: ['v*'] -jobs: - pre-commit: - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - # Verifies that Ruff and mypy checks are passing - - name: pre-commit - run: make pre-commit +# jobs: +# pre-commit: +# runs-on: ubuntu-latest +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - uses: ./.github/actions/setup +# # Verifies that Ruff and mypy checks are passing +# - name: pre-commit +# run: make pre-commit - tests: - env: - REF_TEST_OUTPUT: "test-outputs" - strategy: - fail-fast: false 
- matrix: - os: [ "ubuntu-latest" ] - python-version: [ "3.13" ] - runs-on: "${{ matrix.os }}" - defaults: - run: - # This might be needed for Windows and doesn't seem to affect unix-based systems - # so we include it. If you have better proof of whether this is needed or not, - # feel free to update. - shell: bash - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - with: - python-version: ${{ matrix.python-version }} - - name: Run tests - run: | - make fetch-test-data - make test - uv run coverage xml - - name: Upload coverage reports to Codecov with GitHub Action - uses: codecov/codecov-action@v5 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - - name: Upload scratch artifacts - uses: actions/upload-artifact@v4 - if: always() - with: - name: test-output-${{ matrix.python-version }} - path: ${{ env.REF_TEST_OUTPUT }} - retention-days: 7 +# tests: +# env: +# REF_TEST_OUTPUT: "test-outputs" +# strategy: +# fail-fast: false +# matrix: +# os: [ "ubuntu-latest" ] +# python-version: [ "3.13" ] +# runs-on: "${{ matrix.os }}" +# defaults: +# run: +# # This might be needed for Windows and doesn't seem to affect unix-based systems +# # so we include it. If you have better proof of whether this is needed or not, +# # feel free to update. 
+# shell: bash +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - uses: ./.github/actions/setup +# with: +# python-version: ${{ matrix.python-version }} +# - name: Run tests +# run: | +# make fetch-test-data +# make test +# uv run coverage xml +# - name: Upload coverage reports to Codecov with GitHub Action +# uses: codecov/codecov-action@v5 +# env: +# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} +# - name: Upload scratch artifacts +# uses: actions/upload-artifact@v4 +# if: always() +# with: +# name: test-output-${{ matrix.python-version }} +# path: ${{ env.REF_TEST_OUTPUT }} +# retention-days: 7 - imports-without-extras: - strategy: - fail-fast: false - matrix: - os: [ "ubuntu-latest" ] - python-version: [ "3.11", "3.13" ] - runs-on: "${{ matrix.os }}" - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - with: - python-version: ${{ matrix.python-version }} - - name: Check importable without extras - run: | - uv run --isolated --with-editable packages/climate-ref --with typer --no-project scripts/test-install.py climate_ref - uv run --isolated --with-editable packages/climate-ref-core --with typer --no-project scripts/test-install.py climate_ref_core - uv run --isolated --with-editable packages/climate-ref-celery --with typer --no-project scripts/test-install.py climate_ref_celery - uv run --isolated --with-editable packages/climate-ref-ilamb --with typer --no-project scripts/test-install.py climate_ref_ilamb - uv run --isolated --with-editable packages/climate-ref-esmvaltool --with typer --no-project scripts/test-install.py climate_ref_esmvaltool - uv run --isolated --with-editable packages/climate-ref-pmp --with typer --no-project scripts/test-install.py climate_ref_pmp +# imports-without-extras: +# strategy: +# fail-fast: false +# matrix: +# os: [ "ubuntu-latest" ] +# python-version: [ "3.11", "3.13" ] +# runs-on: "${{ matrix.os }}" +# steps: +# - name: Check out repository +# uses: 
actions/checkout@v4 +# - uses: ./.github/actions/setup +# with: +# python-version: ${{ matrix.python-version }} +# - name: Check importable without extras +# run: | +# uv run --isolated --with-editable packages/climate-ref --with typer --no-project scripts/test-install.py climate_ref +# uv run --isolated --with-editable packages/climate-ref-core --with typer --no-project scripts/test-install.py climate_ref_core +# uv run --isolated --with-editable packages/climate-ref-celery --with typer --no-project scripts/test-install.py climate_ref_celery +# uv run --isolated --with-editable packages/climate-ref-ilamb --with typer --no-project scripts/test-install.py climate_ref_ilamb +# uv run --isolated --with-editable packages/climate-ref-esmvaltool --with typer --no-project scripts/test-install.py climate_ref_esmvaltool +# uv run --isolated --with-editable packages/climate-ref-pmp --with typer --no-project scripts/test-install.py climate_ref_pmp - check-build: - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - - name: Check build - run: | - make build - tar -tvf dist/climate_ref-*.tar.gz --wildcards '*climate_ref/py.typed' '*/LICENCE' '*/NOTICE' - tar -tvf dist/climate_ref_core-*.tar.gz --wildcards '*climate_ref_core/py.typed' '*/LICENCE' '*/NOTICE' - - name: Check installable - run: | - uv pip install dist/*.whl - uv pip freeze - uv run --no-sync python -c "import climate_ref; print(climate_ref.__version__)" - uv run --no-sync ref config list +# check-build: +# runs-on: ubuntu-latest +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - uses: ./.github/actions/setup +# - name: Check build +# run: | +# make build +# tar -tvf dist/climate_ref-*.tar.gz --wildcards '*climate_ref/py.typed' '*/LICENCE' '*/NOTICE' +# tar -tvf dist/climate_ref_core-*.tar.gz --wildcards '*climate_ref_core/py.typed' '*/LICENCE' '*/NOTICE' +# - name: Check installable +# run: | +# uv pip install 
dist/*.whl +# uv pip freeze +# uv run --no-sync python -c "import climate_ref; print(climate_ref.__version__)" +# uv run --no-sync ref config list - # Check if a changelog message was added to the PR - # Only runs on pull requests - check-for-changelog: - runs-on: ubuntu-latest - if: github.event.pull_request - steps: - - name: Check out repository - uses: actions/checkout@v4 - - uses: ./.github/actions/setup - - name: Get all changelog files - id: changed-changelog-files - uses: tj-actions/changed-files@v45 - with: - # Avoid using single or double quotes for multiline patterns - files: | - changelog/*.md - - name: Print out the changed files - if: steps.changed-files-specific.outputs.any_changed == 'true' - env: - ALL_CHANGED_FILES: ${{ steps.changed-changelog-files.outputs.all_changed_files }} - run: | - make changelog-draft - - name: Fail if no changelog message is present - if: steps.changed-changelog-files.outputs.any_changed == 'false' - run: | - echo "No changelog present." - exit 1 +# # Check if a changelog message was added to the PR +# # Only runs on pull requests +# check-for-changelog: +# runs-on: ubuntu-latest +# if: github.event.pull_request +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - uses: ./.github/actions/setup +# - name: Get all changelog files +# id: changed-changelog-files +# uses: tj-actions/changed-files@v45 +# with: +# # Avoid using single or double quotes for multiline patterns +# files: | +# changelog/*.md +# - name: Print out the changed files +# if: steps.changed-files-specific.outputs.any_changed == 'true' +# env: +# ALL_CHANGED_FILES: ${{ steps.changed-changelog-files.outputs.all_changed_files }} +# run: | +# make changelog-draft +# - name: Fail if no changelog message is present +# if: steps.changed-changelog-files.outputs.any_changed == 'false' +# run: | +# echo "No changelog present." 
+# exit 1 - smoke-test: - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - name: Set up Docker Compose - uses: docker/setup-compose-action@v1 - - uses: ./.github/actions/setup - with: - python-version: ${{ matrix.python-version }} - - name: Run smoke test - run: | - bash scripts/smoke-test.sh - - name: Cleanup - if: always() - run: | - docker compose down -v +# smoke-test: +# runs-on: ubuntu-latest +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# - name: Set up Docker Buildx +# uses: docker/setup-buildx-action@v3 +# - name: Set up Docker Compose +# uses: docker/setup-compose-action@v1 +# - uses: ./.github/actions/setup +# with: +# python-version: ${{ matrix.python-version }} +# - name: Run smoke test +# run: | +# bash scripts/smoke-test.sh +# - name: Cleanup +# if: always() +# run: | +# docker compose down -v diff --git a/.github/workflows/containers.yaml b/.github/workflows/containers.yaml deleted file mode 100644 index bb8627835..000000000 --- a/.github/workflows/containers.yaml +++ /dev/null @@ -1,33 +0,0 @@ -name: Build Containers - -on: - pull_request: - workflow_dispatch: - push: - branches: - - "main" - tags: - - "v*" - -permissions: - contents: read - packages: write - attestations: write - id-token: write - -jobs: - climate-ref: - name: climate-ref - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: docker/setup-buildx-action@v3 - - uses: docker/login-action@v3 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - uses: ./.github/actions/build-container - with: - container-name: climate-ref - dockerfile: packages/climate-ref/Dockerfile diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml index 450bb90ee..ab798692a 100644 --- a/.github/workflows/deploy.yaml +++ b/.github/workflows/deploy.yaml @@ -1,44 +1,44 @@ -# Deploys the 
published wheels to PyPI -# Uses the artifact from the release job to publish to PyPI +# # Deploys the published wheels to PyPI +# # Uses the artifact from the release job to publish to PyPI -name: Deploy +# name: Deploy -on: - release: - types: [published] +# on: +# release: +# types: [published] -defaults: - run: - shell: bash +# defaults: +# run: +# shell: bash -jobs: - deploy-pypi: - name: Deploy to PyPI - # Having an environment for deployment is strongly recommend by PyPI - # https://docs.pypi.org/trusted-publishers/adding-a-publisher/#github-actions - environment: deploy - runs-on: ubuntu-latest - permissions: - # this permission is mandatory for trusted publishing with PyPI - id-token: write - steps: - - uses: dsaltares/fetch-gh-release-asset@master - with: - repo: 'Climate-REF/climate-ref' - version: tags/${{ github.ref_name }} - regex: true - file: ".*" - target: 'dist/' - token: ${{ secrets.PAT }} - - name: Install uv - uses: astral-sh/setup-uv@v5 - with: - version: "0.5.x" - python-version: "3.12" - - name: Verify installable - # TODO: this step fails we fix https://github.com/Climate-REF/climate-ref/issues/217 - continue-on-error: true - run: uv pip install dist/*.whl - - name: Publish to PyPI - run: | - uv publish +# jobs: +# deploy-pypi: +# name: Deploy to PyPI +# # Having an environment for deployment is strongly recommend by PyPI +# # https://docs.pypi.org/trusted-publishers/adding-a-publisher/#github-actions +# environment: deploy +# runs-on: ubuntu-latest +# permissions: +# # this permission is mandatory for trusted publishing with PyPI +# id-token: write +# steps: +# - uses: dsaltares/fetch-gh-release-asset@master +# with: +# repo: 'Climate-REF/climate-ref' +# version: tags/${{ github.ref_name }} +# regex: true +# file: ".*" +# target: 'dist/' +# token: ${{ secrets.PAT }} +# - name: Install uv +# uses: astral-sh/setup-uv@v5 +# with: +# version: "0.5.x" +# python-version: "3.12" +# - name: Verify installable +# # TODO: this step fails we fix 
https://github.com/Climate-REF/climate-ref/issues/217 +# continue-on-error: true +# run: uv pip install dist/*.whl +# - name: Publish to PyPI +# run: | +# uv publish diff --git a/.github/workflows/install-pypi.yaml b/.github/workflows/install-pypi.yaml index 5fddbd035..fc86ffa5a 100644 --- a/.github/workflows/install-pypi.yaml +++ b/.github/workflows/install-pypi.yaml @@ -1,123 +1,123 @@ -# Test installation of the latest version from PyPI works. -# We make sure that we run the tests that apply to the version we installed, -# rather than the latest tests in main. -# The reason we do this, is that we want this workflow to test -# that installing from PyPI leads to a correct installation. -# If we tested against main, the tests could fail -# because the tests from main require the new features in main to pass. -name: Test installation PyPI +# # Test installation of the latest version from PyPI works. +# # We make sure that we run the tests that apply to the version we installed, +# # rather than the latest tests in main. +# # The reason we do this, is that we want this workflow to test +# # that installing from PyPI leads to a correct installation. +# # If we tested against main, the tests could fail +# # because the tests from main require the new features in main to pass. +# name: Test installation PyPI -on: - workflow_dispatch: - schedule: - # * is a special character in YAML so you have to quote this string - # This means At 03:00 on Wednesday. - # see https://crontab.guru/#0_0_*_*_3 - - cron: '0 3 * * 3' +# on: +# workflow_dispatch: +# schedule: +# # * is a special character in YAML so you have to quote this string +# # This means At 03:00 on Wednesday. 
+# # see https://crontab.guru/#0_0_*_*_3 +# - cron: '0 3 * * 3' -jobs: - test-pypi-install: - name: Test PyPI install ${{ matrix.install-target }} (${{ matrix.python-version }}, ${{ matrix.os }}) - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest", "macos-latest", "windows-latest"] - # Test SPEC0 supported python versions https://scientific-python.org/specs/spec-0000/ - python-version: [ "3.11", "3.12", "3.13" ] - install-target: [ "climate-ref", "climate-ref[aft-providers]"] - runs-on: "${{ matrix.os }}" - steps: - - name: Set up Python "${{ matrix.python-version }}" - id: setup-python - uses: actions/setup-python@v4 - with: - python-version: "${{ matrix.python-version }}" - - name: Install - run: | - pip install --upgrade pip wheel - pip install "${{ matrix.install-target }}" 2>stderr.txt - - name: Check no warnings - if: matrix.os != 'windows-latest' - run: | - if grep -q "WARN" stderr.txt; then echo "Warnings in pip install output" && cat stderr.txt && exit 1; else exit 0; fi - - name: Get version non-windows - if: matrix.os != 'windows-latest' - run: | - INSTALLED_VERSION=`python -c 'import climate_ref; print(f"v{climate_ref.__version__}")'` - echo $INSTALLED_VERSION - echo "INSTALLED_VERSION=$INSTALLED_VERSION" >> $GITHUB_ENV - - name: Get version windows - if: matrix.os == 'windows-latest' - run: | - chcp 65001 # use utf-8 - python -c 'import climate_ref; f = open("version.txt", "w"); f.write(f"INSTALLED_VERSION=v{climate_ref.__version__}"); f.close()' - echo "Showing version.txt" - type version.txt - type version.txt >> $env:GITHUB_ENV - - name: Check installed version environment variable - run: | - echo "${{ env.INSTALLED_VERSION }}" - - name: Checkout repository - uses: actions/checkout@v4 - with: - ref: ${{ env.INSTALLED_VERSION }} - # Windows can't clone the entire directory due to filename length - sparse-checkout: | - scripts - - name: Test installation - run: | - which python - python scripts/test-install.py climate_ref - 
test-pypi-install-core: - name: Test PyPI install ${{ matrix.install-target }} (${{ matrix.python-version }}, ${{ matrix.os }}) - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest", "macos-latest", "windows-latest"] - # Test against all security and bugfix versions: https://devguide.python.org/versions/ - python-version: [ "3.11", "3.12", "3.13" ] - install-target: [ "climate-ref-core"] - runs-on: "${{ matrix.os }}" - steps: - - name: Set up Python "${{ matrix.python-version }}" - id: setup-python - uses: actions/setup-python@v4 - with: - python-version: "${{ matrix.python-version }}" - - name: Install - run: | - pip install --upgrade pip wheel - # Additional core dep to run the script - pip install --upgrade typer - pip install "${{ matrix.install-target }}" 2>stderr.txt - - name: Check no warnings - if: matrix.os != 'windows-latest' - run: | - if grep -q "WARN" stderr.txt; then echo "Warnings in pip install output" && cat stderr.txt && exit 1; else exit 0; fi - - name: Get version non-windows - if: matrix.os != 'windows-latest' - run: | - INSTALLED_VERSION=`python -c 'import climate_ref_core; print(f"v{climate_ref_core.__version__}")'` - echo $INSTALLED_VERSION - echo "INSTALLED_VERSION=$INSTALLED_VERSION" >> $GITHUB_ENV - - name: Get version windows - if: matrix.os == 'windows-latest' - run: | - chcp 65001 # use utf-8 - python -c 'import climate_ref_core; f = open("version.txt", "w"); f.write(f"INSTALLED_VERSION=v{climate_ref_core.__version__}"); f.close()' - echo "Showing version.txt" - type version.txt - type version.txt >> $env:GITHUB_ENV - - name: Check installed version environment variable - run: | - echo "${{ env.INSTALLED_VERSION }}" - - name: Checkout repository - uses: actions/checkout@v4 - with: - ref: ${{ env.INSTALLED_VERSION }} - sparse-checkout: | - scripts - - name: Test installation - run: | - which python - python scripts/test-install.py climate_ref_core +# jobs: +# test-pypi-install: +# name: Test PyPI install ${{ 
matrix.install-target }} (${{ matrix.python-version }}, ${{ matrix.os }}) +# strategy: +# fail-fast: false +# matrix: +# os: ["ubuntu-latest", "macos-latest", "windows-latest"] +# # Test SPEC0 supported python versions https://scientific-python.org/specs/spec-0000/ +# python-version: [ "3.11", "3.12", "3.13" ] +# install-target: [ "climate-ref", "climate-ref[aft-providers]"] +# runs-on: "${{ matrix.os }}" +# steps: +# - name: Set up Python "${{ matrix.python-version }}" +# id: setup-python +# uses: actions/setup-python@v4 +# with: +# python-version: "${{ matrix.python-version }}" +# - name: Install +# run: | +# pip install --upgrade pip wheel +# pip install "${{ matrix.install-target }}" 2>stderr.txt +# - name: Check no warnings +# if: matrix.os != 'windows-latest' +# run: | +# if grep -q "WARN" stderr.txt; then echo "Warnings in pip install output" && cat stderr.txt && exit 1; else exit 0; fi +# - name: Get version non-windows +# if: matrix.os != 'windows-latest' +# run: | +# INSTALLED_VERSION=`python -c 'import climate_ref; print(f"v{climate_ref.__version__}")'` +# echo $INSTALLED_VERSION +# echo "INSTALLED_VERSION=$INSTALLED_VERSION" >> $GITHUB_ENV +# - name: Get version windows +# if: matrix.os == 'windows-latest' +# run: | +# chcp 65001 # use utf-8 +# python -c 'import climate_ref; f = open("version.txt", "w"); f.write(f"INSTALLED_VERSION=v{climate_ref.__version__}"); f.close()' +# echo "Showing version.txt" +# type version.txt +# type version.txt >> $env:GITHUB_ENV +# - name: Check installed version environment variable +# run: | +# echo "${{ env.INSTALLED_VERSION }}" +# - name: Checkout repository +# uses: actions/checkout@v4 +# with: +# ref: ${{ env.INSTALLED_VERSION }} +# # Windows can't clone the entire directory due to filename length +# sparse-checkout: | +# scripts +# - name: Test installation +# run: | +# which python +# python scripts/test-install.py climate_ref +# test-pypi-install-core: +# name: Test PyPI install ${{ matrix.install-target }} (${{ 
matrix.python-version }}, ${{ matrix.os }}) +# strategy: +# fail-fast: false +# matrix: +# os: ["ubuntu-latest", "macos-latest", "windows-latest"] +# # Test against all security and bugfix versions: https://devguide.python.org/versions/ +# python-version: [ "3.11", "3.12", "3.13" ] +# install-target: [ "climate-ref-core"] +# runs-on: "${{ matrix.os }}" +# steps: +# - name: Set up Python "${{ matrix.python-version }}" +# id: setup-python +# uses: actions/setup-python@v4 +# with: +# python-version: "${{ matrix.python-version }}" +# - name: Install +# run: | +# pip install --upgrade pip wheel +# # Additional core dep to run the script +# pip install --upgrade typer +# pip install "${{ matrix.install-target }}" 2>stderr.txt +# - name: Check no warnings +# if: matrix.os != 'windows-latest' +# run: | +# if grep -q "WARN" stderr.txt; then echo "Warnings in pip install output" && cat stderr.txt && exit 1; else exit 0; fi +# - name: Get version non-windows +# if: matrix.os != 'windows-latest' +# run: | +# INSTALLED_VERSION=`python -c 'import climate_ref_core; print(f"v{climate_ref_core.__version__}")'` +# echo $INSTALLED_VERSION +# echo "INSTALLED_VERSION=$INSTALLED_VERSION" >> $GITHUB_ENV +# - name: Get version windows +# if: matrix.os == 'windows-latest' +# run: | +# chcp 65001 # use utf-8 +# python -c 'import climate_ref_core; f = open("version.txt", "w"); f.write(f"INSTALLED_VERSION=v{climate_ref_core.__version__}"); f.close()' +# echo "Showing version.txt" +# type version.txt +# type version.txt >> $env:GITHUB_ENV +# - name: Check installed version environment variable +# run: | +# echo "${{ env.INSTALLED_VERSION }}" +# - name: Checkout repository +# uses: actions/checkout@v4 +# with: +# ref: ${{ env.INSTALLED_VERSION }} +# sparse-checkout: | +# scripts +# - name: Test installation +# run: | +# which python +# python scripts/test-install.py climate_ref_core diff --git a/.github/workflows/packaging.yaml b/.github/workflows/packaging.yaml new file mode 100644 index 
000000000..ce27c6b3b --- /dev/null +++ b/.github/workflows/packaging.yaml @@ -0,0 +1,134 @@ +name: Packaging + +on: + pull_request: + workflow_dispatch: + push: + branches: + - "main" + tags: + - "v*" + +permissions: + contents: read + packages: write + attestations: write + id-token: write + +jobs: + containers: + name: Containers + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: docker/setup-buildx-action@v3 + - uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + - uses: ./.github/actions/build-container + with: + container-name: climate-ref + dockerfile: packages/climate-ref/Dockerfile + helm: + name: Helm Chart + runs-on: ubuntu-latest + outputs: + generated-semver: ${{ steps.semantic-version.outputs.generated-semver }} + permissions: + packages: write + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + - name: Install jq + run: | + sudo apt-get install --yes jq + - name: Install yq + run: | + pip install yq + - name: Generate SemVer + id: semantic-version + run: | + CHART_VERSION=$(yq -r '.version' helm/Chart.yaml) + LOCAL_SEGMENT=+pr-${{ github.event.pull_request.number }} + GENERATED_VERSION=${CHART_VERSION}${LOCAL_SEGMENT} + yq -Y -i ".version = \"$GENERATED_VERSION\"" helm/Chart.yaml + echo "generated-semver=$GENERATED_VERSION" >> $GITHUB_OUTPUT + - name: Chart | Push + uses: appany/helm-oci-chart-releaser@v0.5.0 + with: + name: ref + repository: climate-ref/charts + tag: ${{ steps.semantic-version.outputs.generated-semver }} + path: helm + registry: ghcr.io + registry_username: ${{ github.actor }} + registry_password: ${{ secrets.GITHUB_TOKEN }} + update_dependencies: 'true' + + test: + name: Test Helm Deployment + runs-on: ubuntu-latest + if: github.event_name == 'pull_request' + needs: [containers, helm] + steps: + - uses: actions/checkout@v4 + - name: Cache Sample Data (Restore) + id: cache-sample-data-restore + uses: actions/cache/restore@v4 + with: + path: ${{ github.workspace }}/cache/ref-config + key: ${{ 
runner.os }}-sample-data + enableCrossOsArchive: true + - name: Set permissions for cached data + run: | + sudo install -d --owner=1000 --group=1000 ${GITHUB_WORKSPACE}/cache/ref-config + - name: Start minikube + uses: medyagh/setup-minikube@latest + with: + mount-path: '${{ github.workspace }}/cache/ref-config:/cache/ref-config' + - name: Set up Helm + uses: azure/setup-helm@v4.3.0 + - name: Install Chart + run: | + helm install test oci://ghcr.io/climate-ref/charts/ref \ + --version=${{ needs.helm.outputs.generated-semver }} \ + --set climate-ref.image.tag=pr-${{ github.event.pull_request.number }} \ + -f helm/ci/gh-actions-values.yaml + + sleep 60 + kubectl get pods + echo "" + kubectl describe pod -l app.kubernetes.io/component=pmp + echo "" + kubectl logs -l app.kubernetes.io/component=pmp + - name: Run Migrations + run: | + kubectl exec deployment/test-ref-orchestrator -- ref config list + - name: Initialize Providers (pmp) + run: | + # Imports ilamb3 which tries to create /home/app/.config/ilamb3 on import, no way to tell it to live somewhere else + kubectl exec deployment/test-ref-pmp -- ref providers create-env --provider pmp + - name: Initialize Providers (emsvaltool) + run: | + kubectl exec deployment/test-ref-esmvaltool -- ref providers create-env --provider esmvaltool + - name: Fetch Test Data + run: | + kubectl exec deployment/test-ref-orchestrator -- ref datasets fetch-data --registry sample-data --output-directory /ref/sample-data + + - name: Cache Sample Data (Save) + uses: actions/cache/save@v4 + with: + path: ${{ github.workspace }}/cache/ref-config + key: ${{ runner.os }}-sample-data + + - name: Ingest Test Data (CMIP6) + run: | + kubectl exec deployment/test-ref-orchestrator -- ref -v datasets ingest --source-type cmip6 /ref/sample-data/CMIP6 + - name: Ingest Test Data (obs4mips) + run: | + kubectl exec deployment/test-ref-orchestrator -- ref -v datasets ingest --source-type obs4mips /ref/sample-data/obs4REF + - name: Simple Solve + run: | + 
kubectl exec deployment/test-ref-orchestrator -- ref -v solve --timeout 180 --one-per-provider diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index f58b3c886..ee1fd718e 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -1,48 +1,48 @@ -# Generate a draft release on GitHub when a new tag is pushed -# The draft release will contain draft release notes and some built wheels +# # Generate a draft release on GitHub when a new tag is pushed +# # The draft release will contain draft release notes and some built wheels -name: Release +# name: Release -on: - push: - tags: ['v*'] +# on: +# push: +# tags: ['v*'] -defaults: - run: - shell: bash +# defaults: +# run: +# shell: bash -jobs: - draft-release: - name: Create draft release - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: ./.github/actions/setup - - name: Add version to environment - run: | - PROJECT_VERSION=`sed -ne 's/^version = "\([0-9\.abrc]*\)"/\1/p' pyproject.toml` - echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV - - name: Build package for PyPI - run: make build - - name: Generate Release Notes - run: | - echo "" >> ".github/release_template.md" - echo "## Changelog" >> ".github/release_template.md" - echo "" >> ".github/release_template.md" - uv run python scripts/changelog-to-release-template.py >> ".github/release_template.md" - echo "" >> ".github/release_template.md" - echo "## Changes" >> ".github/release_template.md" - echo "" >> ".github/release_template.md" - git log $(git describe --tags --abbrev=0 HEAD^)..HEAD --pretty='format:* %h %s' --no-merges >> ".github/release_template.md" - echo .github/release_template.md - - name: Create Release Draft - uses: softprops/action-gh-release@v2 - with: - body_path: ".github/release_template.md" - token: "${{ secrets.PAT }}" - draft: true - files: | - dist/* +# jobs: +# draft-release: +# name: Create draft release +# 
runs-on: ubuntu-latest +# steps: +# - name: Check out repository +# uses: actions/checkout@v4 +# with: +# fetch-depth: 0 +# - uses: ./.github/actions/setup +# - name: Add version to environment +# run: | +# PROJECT_VERSION=`sed -ne 's/^version = "\([0-9\.abrc]*\)"/\1/p' pyproject.toml` +# echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV +# - name: Build package for PyPI +# run: make build +# - name: Generate Release Notes +# run: | +# echo "" >> ".github/release_template.md" +# echo "## Changelog" >> ".github/release_template.md" +# echo "" >> ".github/release_template.md" +# uv run python scripts/changelog-to-release-template.py >> ".github/release_template.md" +# echo "" >> ".github/release_template.md" +# echo "## Changes" >> ".github/release_template.md" +# echo "" >> ".github/release_template.md" +# git log $(git describe --tags --abbrev=0 HEAD^)..HEAD --pretty='format:* %h %s' --no-merges >> ".github/release_template.md" +# echo .github/release_template.md +# - name: Create Release Draft +# uses: softprops/action-gh-release@v2 +# with: +# body_path: ".github/release_template.md" +# token: "${{ secrets.PAT }}" +# draft: true +# files: | +# dist/* diff --git a/.gitignore b/.gitignore index 7411f76fb..5ed5ed9bf 100644 --- a/.gitignore +++ b/.gitignore @@ -158,3 +158,6 @@ packages/*/NOTICE # Local directory for data /data + +# Helm dependencies +helm/charts/* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ccb4e0d58..3354d002c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -25,6 +25,7 @@ repos: - id: check-yaml args: - --unsafe + exclude: helm/templates - id: debug-statements - id: detect-private-key - id: end-of-file-fixer diff --git a/docs/installation.md b/docs/installation.md index 9169b9164..d4feb386e 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -7,7 +7,7 @@ This was a deliberate decision to make it easy to make the framework easy to ins Some of the [diagnostic providers](nutshell.md) 
require additional dependencies in order to run an execution. For these providers, the REF can automatically create a new Conda environment and install the required dependencies in this standalone environment. Each of these provider-specific environments are decoupled to allow for potentially incompatible dependencies. -This uses a bundled version of the [micromamba](https://github.com/mamba-org/micromamba-releases) +This uses a bundled version of [micromamba](https://github.com/mamba-org/micromamba-releases) to create and manage the environments so no additional dependencies are required. /// admonition | HPC Users @@ -65,7 +65,7 @@ The conda-forge packages are a work in progress and are not yet available. See [#80](https://github.com/Climate-REF/climate-ref/issues/80) for more information. /// -We intend on providing a recipe on conda-force. Once complete you can install `climate-ref` using `mamba` or `conda`: +We intend on providing a recipe on conda-forge. Once complete you can install `climate-ref` using `mamba` or `conda`: ```bash mamba install -c conda-forge climate-ref @@ -96,7 +96,7 @@ If you want to use the latest development version, you can build the Docker imag ```bash git clone https://github.com/Climate-REF/climate-ref.git cd climate-ref -docker-compose build +docker build ``` If you require the full-stack of services recommended for a production deployment, you can use the `docker-compose` file to start the services. @@ -125,7 +125,7 @@ See the [development documentation](development.md) for more information on how /// admonition | Windows support type: warning -Window's doesn't support some of the packages required by the [diagnostic providers](nutshell.md), +Windows doesn't support some of the packages required by the [diagnostic providers](nutshell.md), so we only support MacOS and Linux. Windows users are recommended to use [WSL](https://learn.microsoft.com/en-us/windows/wsl/install) or a Linux VM if they wish to use the REF. 
diff --git a/helm/.helmignore b/helm/.helmignore new file mode 100644 index 000000000..0e8a0eb36 --- /dev/null +++ b/helm/.helmignore @@ -0,0 +1,23 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ diff --git a/helm/Chart.yaml b/helm/Chart.yaml new file mode 100644 index 000000000..cd034bdf3 --- /dev/null +++ b/helm/Chart.yaml @@ -0,0 +1,21 @@ +apiVersion: v2 +name: ref +description: A Helm chart for Kubernetes +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 + +# This is the version number of the application being deployed. This version number should be +# incremented each time you make changes to the application. Versions are not expected to +# follow Semantic Versioning. They should reflect the version the application is using. +# It is recommended to use it with quotes. 
+appVersion: "0.2.0" + +dependencies: +- name: dragonfly + version: v1.33.1 + repository: oci://ghcr.io/dragonflydb/dragonfly/helm + condition: dragonfly.enabled diff --git a/helm/ci/gh-actions-values.yaml b/helm/ci/gh-actions-values.yaml new file mode 100644 index 000000000..c13a40a88 --- /dev/null +++ b/helm/ci/gh-actions-values.yaml @@ -0,0 +1,31 @@ +ingress: + host: Climate-ref.test + +service: + type: NodePort + +flower: + podSecurityContext: + runAsUser: 1000 + +defaults: + podSecurityContext: + runAsUser: 1000 + volumes: + - name: gha-cached + hostPath: + path: /cache/ref-config + - name: tmp + emptyDir: {} + volumeMounts: + - name: gha-cached + mountPath: /ref + readOnly: false + - name: tmp + mountPath: /tmp + readOnly: false + +dragonfly: + extraArgs: + - --proactor_threads=1 + - --maxmemory=256mb diff --git a/helm/templates/_helpers.tpl b/helm/templates/_helpers.tpl new file mode 100644 index 000000000..365445b69 --- /dev/null +++ b/helm/templates/_helpers.tpl @@ -0,0 +1,51 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "ref.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). +If release name contains chart name it will be used as a full name. +*/}} +{{- define "ref.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. 
+*/}} +{{- define "ref.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "ref.labels" -}} +helm.sh/chart: {{ include "ref.chart" . }} +{{ include "ref.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "ref.selectorLabels" -}} +app.kubernetes.io/name: {{ include "ref.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} diff --git a/helm/templates/flower/deployment.yaml b/helm/templates/flower/deployment.yaml new file mode 100644 index 000000000..444896399 --- /dev/null +++ b/helm/templates/flower/deployment.yaml @@ -0,0 +1,80 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "ref.fullname" . }}-flower + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 4 }} +spec: + {{- if not .Values.flower.autoscaling.enabled }} + replicas: {{ .Values.flower.replicaCount }} + {{- end }} + selector: + matchLabels: + app.kubernetes.io/component: flower + {{- include "ref.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/flower/secret.yaml") . | sha256sum }} + {{- with .Values.flower.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 8 }} + {{- with .Values.flower.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "ref.fullname" . }}-flower + {{- with .Values.flower.podSecurityContext }} + securityContext: + {{- toYaml . 
| nindent 8 }} + {{- end }} + containers: + - name: flower + {{- with .Values.flower.securityContext }} + securityContext: + {{- toYaml . | nindent 12 }} + {{- end }} + image: "{{ .Values.flower.image.repository }}:{{ .Values.flower.image.tag }}" + imagePullPolicy: {{ .Values.flower.image.pullPolicy }} + envFrom: + - secretRef: + name: {{ include "ref.fullname" . }}-flower + ports: + - name: http + containerPort: {{ .Values.flower.service.port }} + readinessProbe: + httpGet: + path: /healthcheck + port: http + {{- with .Values.flower.resources }} + resources: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with .Values.flower.volumeMounts }} + volumeMounts: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with .Values.flower.volumes }} + volumes: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.flower.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.flower.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.flower.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/helm/templates/flower/secret.yaml b/helm/templates/flower/secret.yaml new file mode 100644 index 000000000..7c886b4b5 --- /dev/null +++ b/helm/templates/flower/secret.yaml @@ -0,0 +1,9 @@ +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "ref.fullname" . }}-flower + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 4 }} +stringData: + {{- tpl (toYaml .Values.flower.env) . | nindent 2}} diff --git a/helm/templates/flower/service.yaml b/helm/templates/flower/service.yaml new file mode 100644 index 000000000..d7572ff5b --- /dev/null +++ b/helm/templates/flower/service.yaml @@ -0,0 +1,17 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "ref.fullname" . }}-flower + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . 
| nindent 4 }} +spec: + type: {{ .Values.flower.service.type }} + ports: + - port: {{ .Values.flower.service.port }} + targetPort: http + protocol: TCP + name: http + selector: + app.kubernetes.io/component: flower + {{- include "ref.selectorLabels" . | nindent 4 }} diff --git a/helm/templates/flower/serviceaccount.yaml b/helm/templates/flower/serviceaccount.yaml new file mode 100644 index 000000000..6cf813908 --- /dev/null +++ b/helm/templates/flower/serviceaccount.yaml @@ -0,0 +1,14 @@ +{{- if .Values.flower.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "ref.fullname" . }}-flower + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 4 }} + {{- with .Values.flower.serviceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +automountServiceAccountToken: {{ .Values.flower.serviceAccount.automount }} +{{- end }} diff --git a/helm/templates/flower/servicemonitor.yaml b/helm/templates/flower/servicemonitor.yaml new file mode 100644 index 000000000..37e9ff6ef --- /dev/null +++ b/helm/templates/flower/servicemonitor.yaml @@ -0,0 +1,21 @@ +{{- if .Values.flower.serviceMonitor.enabled }} +apiVersion: monitoring.coreos.com/v1 +kind: ServiceMonitor +metadata: + name: {{ include "ref.fullname" . }} + labels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 4 }} +spec: + selector: + matchLabels: + app.kubernetes.io/component: flower + {{- include "ref.labels" . | nindent 4 }} + endpoints: + - port: http + path: /metrics + interval: 30s + namespaceSelector: + matchNames: + - {{ .Release.Namespace }} +{{- end }} diff --git a/helm/templates/ingress.yaml b/helm/templates/ingress.yaml new file mode 100644 index 000000000..4e8720104 --- /dev/null +++ b/helm/templates/ingress.yaml @@ -0,0 +1,34 @@ +{{- if .Values.ingress.enabled -}} +apiVersion: networking.k8s.io/v1 +kind: Ingress +metadata: + name: {{ include "ref.fullname" . 
}} + labels: + {{- include "ref.labels" . | nindent 4 }} + {{- with .Values.ingress.labels }} + {{- toYaml . | nindent 4 }} + {{- end }} + {{- with .Values.ingress.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +spec: + {{- with .Values.ingress.className }} + ingressClassName: {{ . }} + {{- end }} + tls: + - hosts: + - {{ .Values.ingress.host | quote }} + secretName: {{ include "ref.fullname" . }}-ingress-cert + rules: + - host: {{ .Values.ingress.host | quote }} + http: + paths: + - path: / + pathType: ImplementationSpecific + backend: + service: + name: {{ include "ref.fullname" . }}-flower + port: + number: {{ .Values.flower.service.port }} +{{- end }} diff --git a/helm/templates/providers/deployment.yaml b/helm/templates/providers/deployment.yaml new file mode 100644 index 000000000..85c0d0bff --- /dev/null +++ b/helm/templates/providers/deployment.yaml @@ -0,0 +1,85 @@ +{{- range $provider, $spec := (omit .Values.providers "defaults") -}} +{{- $spec := merge (deepCopy $.Values.defaults) $spec -}} +{{- $args := list "celery" "start-worker" "--loglevel" "DEBUG" -}} +{{- if ne $provider "orchestrator" -}} {{- $args = concat $args (list "--provider" $provider) -}} {{- end -}} +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "ref.fullname" $ }}-{{ $provider }} + {{- with $spec.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} + labels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.labels" $ | nindent 4 }} +spec: + {{- if not (and $spec.autoscaling $spec.autoscaling.enabled) }} + replicas: {{ $spec.replicaCount }} + {{- end }} + selector: + matchLabels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.selectorLabels" $ | nindent 6 }} + template: + metadata: + annotations: + checksum/config: {{ include (print $.Template.BasePath "/providers/secret.yaml") $ | sha256sum }} + {{- with $spec.podAnnotations }} + {{- toYaml . 
| nindent 8 }} + {{- end }} + labels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.labels" $ | nindent 8 }} + {{- with $spec.podLabels }} + {{- toYaml . | nindent 8 }} + {{- end }} + spec: + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "ref.fullname" $ }}-{{ $provider }} + {{- with $spec.podSecurityContext }} + securityContext: + {{- toYaml . | nindent 8 }} + {{- end }} + containers: + - name: {{ $provider }} + {{- with $spec.securityContext }} + securityContext: + {{- toYaml . | nindent 12 }} + {{- end }} + image: "{{ $spec.image.repository }}:{{ $spec.image.tag }}" + imagePullPolicy: {{ $spec.image.pullPolicy }} + args: + {{- toYaml $args | nindent 8 }} + envFrom: + - secretRef: + name: {{ include "ref.fullname" $ }}-{{ $provider }} + {{- with $spec.resources }} + resources: + {{- toYaml . | nindent 12 }} + {{- end }} + {{- with $spec.volumeMounts }} + volumeMounts: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $spec.volumes }} + volumes: + {{- toYaml . | nindent 6 }} + {{- end }} + {{- with $spec.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $spec.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with $spec.tolerations }} + tolerations: + {{- toYaml . 
| nindent 8 }} + {{- end }} +--- +{{ end -}} diff --git a/helm/templates/providers/hpa.yaml b/helm/templates/providers/hpa.yaml new file mode 100644 index 000000000..0e7f16927 --- /dev/null +++ b/helm/templates/providers/hpa.yaml @@ -0,0 +1,32 @@ +{{- range $provider, $spec := (omit .Values.providers "defaults") -}} +{{- $spec := merge (deepCopy $.Values.defaults) $spec -}} +{{- if and $spec.autoscaling $spec.autoscaling.enabled }} +apiVersion: autoscaling/v2 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "ref.fullname" $ }}-{{ $provider }} + labels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.labels" $ | nindent 4 }} +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ include "ref.fullname" $ }}-{{ $provider }} + minReplicas: {{ $spec.autoscaling.minReplicas }} + maxReplicas: {{ $spec.autoscaling.maxReplicas }} + metrics: + - type: Object + object: + metric: + name: flower_task_prefetch_time_seconds + describedObject: + apiVersion: v1 + kind: Service + name: celery-metrics-service # Service exposing Celery metrics + target: + type: Value + value: "50" # Scale up if queue length exceeds 50 +{{- end }} +--- +{{- end }} diff --git a/helm/templates/providers/pvc.yaml b/helm/templates/providers/pvc.yaml new file mode 100644 index 000000000..552d26d8c --- /dev/null +++ b/helm/templates/providers/pvc.yaml @@ -0,0 +1,15 @@ +{{- range $name, $size := .Values.createPVCs -}} +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + labels: + {{- include "ref.labels" $ | nindent 4 }} + name: {{ include "ref.fullname" $ }}-{{ $name }} +spec: + accessModes: + - ReadWriteMany + resources: + requests: + storage: {{ $size }} +--- +{{ end -}} diff --git a/helm/templates/providers/secret.yaml b/helm/templates/providers/secret.yaml new file mode 100644 index 000000000..b0ecfb302 --- /dev/null +++ b/helm/templates/providers/secret.yaml @@ -0,0 +1,13 @@ +{{- range $provider, $spec := (omit .Values.providers "defaults") -}} 
+{{- $spec := merge (deepCopy $.Values.defaults) $spec -}} +apiVersion: v1 +kind: Secret +metadata: + name: {{ include "ref.fullname" $ }}-{{ $provider }} + labels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.labels" $ | nindent 4 }} +stringData: + {{- tpl (toYaml $spec.env) $ | nindent 2}} +--- +{{- end }} diff --git a/helm/templates/providers/serviceaccount.yaml b/helm/templates/providers/serviceaccount.yaml new file mode 100644 index 000000000..f6b9354a9 --- /dev/null +++ b/helm/templates/providers/serviceaccount.yaml @@ -0,0 +1,18 @@ +{{- range $provider, $spec := (omit .Values.providers "defaults") -}} +{{- $spec := merge (deepCopy $.Values.defaults) $spec -}} +{{- if and $spec.serviceAccount $spec.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "ref.fullname" $ }}-{{ $provider }} + labels: + app.kubernetes.io/component: {{ $provider }} + {{- include "ref.labels" $ | nindent 4 }} + {{- with $spec.serviceAccount.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }} + {{- end }} +automountServiceAccountToken: {{ $spec.serviceAccount.automount }} +{{- end }} +--- +{{- end }} diff --git a/helm/values.yaml b/helm/values.yaml new file mode 100644 index 000000000..7d072df46 --- /dev/null +++ b/helm/values.yaml @@ -0,0 +1,134 @@ +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + +ingress: + enabled: false + host: + className: "" + annotations: {} + # kubernetes.io/ingress.class: nginx + # kubernetes.io/tls-acme: "true" + labels: {} + +dragonfly: + storage: + enabled: true + +flower: + env: + CELERY_BROKER_URL: redis://{{ include "dragonfly.fullname" .Subcharts.dragonfly }}:{{ .Values.dragonfly.service.port }} + CELERY_RESULT_BACKEND: redis://{{ include "dragonfly.fullname" .Subcharts.dragonfly }}:{{ .Values.dragonfly.service.port }} + + serviceMonitor: + enabled: false + + replicaCount: 1 + + image: + repository: mher/flower + pullPolicy: IfNotPresent + tag: 2.0.1 + + serviceAccount: + create: true + automount: false + annotations: {} + name: "" + + podAnnotations: {} + podLabels: {} + + podSecurityContext: {} + + securityContext: + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true + + service: + type: ClusterIP + port: 5555 + + resources: {} + + autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + + volumes: [] + volumeMounts: [] + nodeSelector: {} + tolerations: [] + affinity: {} + +# A mapping of name: size for PVCs to create. Name will be prepended with release name. 
+# Not mounted unless specified in volumes and volumeMounts arrays +createPVCs: {} + +defaults: + env: + CELERY_BROKER_URL: redis://{{ include "dragonfly.fullname" .Subcharts.dragonfly }}:{{ .Values.dragonfly.service.port }} + CELERY_RESULT_BACKEND: redis://{{ include "dragonfly.fullname" .Subcharts.dragonfly }}:{{ .Values.dragonfly.service.port }} + CELERY_ACCEPT_CONTENT: | + ["json", "pickle"] + REF_EXECUTOR: climate_ref_celery.executor.CeleryExecutor + + replicaCount: 1 + + image: + repository: ghcr.io/climate-ref/climate-ref + pullPolicy: IfNotPresent + tag: pr-388 + + annotations: {} + + serviceAccount: + create: true + automount: false + annotations: {} + name: "" + + podAnnotations: {} + podLabels: {} + + podSecurityContext: + fsGroup: 1000 + + securityContext: + capabilities: + drop: + - ALL + readOnlyRootFilesystem: true + runAsNonRoot: true + + service: + type: ClusterIP + port: 80 + + resources: + + autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + + volumes: [] + volumeMounts: [] + nodeSelector: {} + tolerations: [] + affinity: {} + +# Map of instances to deploy, where the key is the instance name and the value is any provider-specific overrides. If no overrides are provided, the provider will be deployed with the defaults above. +providers: + orchestrator: {} + esmvaltool: {} + pmp: {} + # ilamb: {} # currently wants to write to .config/ilamb3/config.py, needs to be made configurable diff --git a/packages/climate-ref-celery/src/climate_ref_celery/celeryconf/base.py b/packages/climate-ref-celery/src/climate_ref_celery/celeryconf/base.py index 4f883f22a..35e4629dd 100644 --- a/packages/climate-ref-celery/src/climate_ref_celery/celeryconf/base.py +++ b/packages/climate-ref-celery/src/climate_ref_celery/celeryconf/base.py @@ -4,13 +4,16 @@ Other environments can use these settings as a base and override them as needed. 
""" -from climate_ref_core.env import get_env +import os + +from climate_ref_core.env import get_available_cpu_count, get_env env = get_env() broker_url = env.str("CELERY_BROKER_URL", "redis://localhost:6379/1") result_backend = env.str("CELERY_RESULT_BACKEND", broker_url) broker_connection_retry_on_startup = True +worker_concurrency = os.environ.get("CELERY_WORKER_CONCURRENCY", get_available_cpu_count()) # Accept JSON and pickle as content accept_content = ["json", "pickle"] diff --git a/packages/climate-ref-core/src/climate_ref_core/env.py b/packages/climate-ref-core/src/climate_ref_core/env.py index a059b20d9..75b33d1dd 100644 --- a/packages/climate-ref-core/src/climate_ref_core/env.py +++ b/packages/climate-ref-core/src/climate_ref_core/env.py @@ -32,4 +32,41 @@ def get_env() -> Env: return env +def get_available_cpu_count() -> int: + """ + Detect the number of CPU cores available considering cgroup limitations. + + Returns + ------- + : + The number of allocated CPUs or total cpu count if not running in a cgroup-limited environment. 
+ """ + try: + # Check for CPU quota + with open("/sys/fs/cgroup/cpu/cpu.cfs_quota_us") as f: + quota = int(f.read()) + with open("/sys/fs/cgroup/cpu/cpu.cfs_period_us") as f: + period = int(f.read()) + + if quota > 0 and period > 0: + return quota // period + + # If no quota, check for cpuset + with open("/sys/fs/cgroup/cpuset/cpuset.cpus") as f: + cpuset = f.read().strip() + # Parse the cpuset string (e.g., "0-3", "0,2") + count = 0 + for part in cpuset.split(","): + if "-" in part: + start, end = map(int, part.split("-")) + count += end - start + 1 + else: + count += 1 + return count + + except FileNotFoundError: + # Not running in a cgroup-limited environment or cgroup files not found + return os.cpu_count() or 1 + + env = get_env() diff --git a/packages/climate-ref-core/src/climate_ref_core/providers.py b/packages/climate-ref-core/src/climate_ref_core/providers.py index d16fddb55..a42c6e4f7 100644 --- a/packages/climate-ref-core/src/climate_ref_core/providers.py +++ b/packages/climate-ref-core/src/climate_ref_core/providers.py @@ -267,7 +267,7 @@ def __init__( self._conda_exe: Path | None = None self._prefix: Path | None = None self.url = f"git+{repo}@{tag_or_commit}" if repo and tag_or_commit else None - self.env_vars: dict[str, str] = {} + self.env_vars: dict[str, str] = os.environ.copy() @property def prefix(self) -> Path: @@ -288,9 +288,26 @@ def prefix(self, path: Path) -> None: def configure(self, config: Config) -> None: """Configure the provider.""" self.prefix = config.paths.software / "conda" + self.env_vars.setdefault("HOME", str(self.prefix)) + + def _is_stale(self, path: Path) -> bool: + """Check if a file is older than `MICROMAMBA_MAX_AGE`. + + Parameters + ---------- + path + The path to the file to check. + + Returns + ------- + True if the file is older than `MICROMAMBA_MAX_AGE`, False otherwise. 
+ """ + creation_time = datetime.datetime.fromtimestamp(path.stat().st_ctime) + age = datetime.datetime.now() - creation_time + return age > MICROMAMBA_MAX_AGE def _install_conda(self, update: bool) -> Path: - """Install micromamba in a temporary location. + """Install micromamba in a specific location. Parameters ---------- @@ -304,20 +321,15 @@ def _install_conda(self, update: bool) -> Path: """ conda_exe = self.prefix / "micromamba" - if conda_exe.exists() and update: - # Only update if the executable is older than `MICROMAMBA_MAX_AGE`. - creation_time = datetime.datetime.fromtimestamp(conda_exe.stat().st_ctime) - age = datetime.datetime.now() - creation_time - if age < MICROMAMBA_MAX_AGE: - update = False - - if not conda_exe.exists() or update: + if not conda_exe.exists() or update or self._is_stale(conda_exe): logger.info("Installing conda") self.prefix.mkdir(parents=True, exist_ok=True) - response = requests.get(_get_micromamba_url(), timeout=120) + response = requests.get(_get_micromamba_url(), timeout=120, stream=True) response.raise_for_status() with conda_exe.open(mode="wb") as file: - file.write(response.content) + for chunk in response.iter_content(chunk_size=8192): + if chunk: # Filter out keep-alive new chunks + file.write(chunk) conda_exe.chmod(stat.S_IRWXU) logger.info("Successfully installed conda.") @@ -378,7 +390,7 @@ def create_env(self) -> None: f"{self.env_path}", ] logger.debug(f"Running {' '.join(cmd)}") - subprocess.run(cmd, check=True) # noqa: S603 + subprocess.run(cmd, check=True, env=self.env_vars) # noqa: S603 if self.url is not None: logger.info(f"Installing development version of {self.slug} from {self.url}") @@ -393,7 +405,7 @@ def create_env(self) -> None: self.url, ] logger.debug(f"Running {' '.join(cmd)}") - subprocess.run(cmd, check=True) # noqa: S603 + subprocess.run(cmd, check=True, env=self.env_vars) # noqa: S603 def run(self, cmd: Iterable[str]) -> None: """ @@ -426,8 +438,6 @@ def run(self, cmd: Iterable[str]) -> None: 
*cmd, ] logger.info(f"Running '{' '.join(cmd)}'") - env_vars = os.environ.copy() - env_vars.update(self.env_vars) try: # This captures the log output until the execution is complete # We could poll using `subprocess.Popen` if we want something more responsive @@ -437,7 +447,7 @@ def run(self, cmd: Iterable[str]) -> None: stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True, - env=env_vars, + env=self.env_vars, ) logger.info("Command output: \n" + res.stdout) logger.info("Command execution successful") diff --git a/packages/climate-ref-core/tests/unit/test_providers.py b/packages/climate-ref-core/tests/unit/test_providers.py index 62f541df8..787c6478c 100644 --- a/packages/climate-ref-core/tests/unit/test_providers.py +++ b/packages/climate-ref-core/tests/unit/test_providers.py @@ -1,13 +1,16 @@ -import datetime +import io import logging import subprocess -import time from contextlib import contextmanager +from contextlib import nullcontext as does_not_raise from pathlib import Path import pytest +import pytest_mock +from requests import Response import climate_ref_core.providers +from climate_ref.config import Config from climate_ref_core.diagnostics import CommandLineDiagnostic, Diagnostic from climate_ref_core.exceptions import InvalidDiagnosticException, InvalidProviderException from climate_ref_core.providers import CondaDiagnosticProvider, DiagnosticProvider, import_provider @@ -106,7 +109,12 @@ def test_get_micromamba_url(mocker, sysname, machine): class TestCondaMetricsProvider: @pytest.fixture - def provider(self, tmp_path): + def provider(self, tmp_path, mocker): + mocker.patch.object( + climate_ref_core.providers.os.environ, + "copy", + return_value={"existing_var": "existing_value"}, + ) provider = CondaDiagnosticProvider("provider_name", "v0.23") provider.prefix = tmp_path / "conda" return provider @@ -117,37 +125,71 @@ def test_no_prefix(self): with pytest.raises(ValueError, match="No prefix for conda environments configured.*"): provider.prefix 
- def test_configure(self, config): - provider = CondaDiagnosticProvider("provider_name", "v0.23") + def test_configure(self, config: Config, provider: CondaDiagnosticProvider) -> None: provider.configure(config) assert isinstance(provider.prefix, Path) - @pytest.mark.parametrize("update", [True, False]) - def test_get_conda_exe(self, mocker, provider, update): - if update: - conda_exe = provider.prefix / "micromamba" - provider.prefix.mkdir() - conda_exe.touch() - mocker.patch.object( - climate_ref_core.providers, - "MICROMAMBA_MAX_AGE", - datetime.timedelta(microseconds=1), - ) - time.sleep(0.01) # wait for the executable to expire. + # Ensure configure() sets HOME to contain mamba writes + assert "HOME" in provider.env_vars - get = mocker.patch.object( - climate_ref_core.providers.requests, - "get", - create_autospec=True, + def test_preserves_env_vars(self, config: Config, mocker: pytest_mock.MockFixture) -> None: + mock_env = mocker.patch.object( + climate_ref_core.providers.os.environ, + "copy", + return_value={"preserved_var": "untouched", "overridden_var": "untouched"}, ) - response = get.return_value - response.content = b"test" - - result = provider.get_conda_exe(update=update) - - response.raise_for_status.assert_called_with() - assert result.read_bytes() == b"test" + provider = CondaDiagnosticProvider("provider_name", "v0.23") + provider.configure(config) + provider.env_vars["overridden_var"] = "overridden" + provider.env_vars["new_var"] = "added" + + # Ensure os.environ.copy was used vs manipulating the whole execution environ + mock_env.assert_called_once() + + # Ensure existing env vars are preserved and new ones are added + assert provider.env_vars == { + "preserved_var": "untouched", + "overridden_var": "overridden", + "new_var": "added", + "HOME": str(provider.prefix), + } + + @pytest.mark.parametrize( + "exists, update, is_stale, should_have_downloaded", + [ + (True, True, True, True), + (True, True, False, False), + (True, False, True, False), + 
(True, False, False, False), + (False, True, True, True), + (False, True, False, True), + (False, False, True, True), + (False, False, False, True), + ], + ) + def test_get_conda_exe( + self, mocker: pytest_mock.MockFixture, provider, exists, update, is_stale, should_have_downloaded + ): + fake_file = io.BytesIO() + + mock_conda_exe = mocker.MagicMock(spec=Path, exists=lambda: exists) + mock_conda_exe.open.return_value.__enter__.return_value.write = fake_file.write + mock_conda_exe.read_bytes = lambda: fake_file.getvalue() + mocker.patch.object(Path, "__truediv__", return_value=mock_conda_exe) + + mocker.patch.object(provider, "_is_stale", return_value=is_stale) + mocker.patch("climate_ref_core.providers.MICROMAMBA_MAX_AGE", 0) + + mock_response = mocker.MagicMock(spec=Response) + mock_response.iter_content.return_value.__iter__.return_value = iter([b"test"]) + mock_get = mocker.patch.object(climate_ref_core.providers.requests, "get", return_value=mock_response) + + if should_have_downloaded: + assert provider.get_conda_exe(update=update).read_bytes() == b"test" + mock_response.raise_for_status.assert_called_once() + else: + mock_get.assert_not_called() def test_get_conda_exe_repeat(self, mocker, tmp_path, provider): conda_exe = tmp_path / "micromamba" @@ -239,6 +281,7 @@ def lockfile_context(): f"{env_path}", ], check=True, + env={"existing_var": "existing_value"}, ) def test_skip_create_env(self, mocker, caplog, provider): @@ -256,12 +299,27 @@ def test_skip_create_env(self, mocker, caplog, provider): assert f"Environment at {env_path} already exists, skipping." 
in caplog.text - @pytest.mark.parametrize("env_exists", [True, False]) - def test_run(self, mocker, tmp_path, provider, env_exists): + @pytest.mark.parametrize( + ("env_exists", "raised"), + [ + (True, does_not_raise()), + ( + False, + pytest.raises( + RuntimeError, + match=r"Conda environment for provider `provider_name` not available at .*", + ), + ), + ], + ) + def test_run(self, mocker: pytest_mock.MockerFixture, tmp_path, provider, env_exists, raised): conda_exe = tmp_path / "conda" / "micromamba" - env_path = provider.prefix / "mock-env" - if env_exists: - env_path.mkdir(parents=True) + mock_env_path = mocker.Mock( + spec=Path, + new_callable=mocker.PropertyMock, + exists=lambda: env_exists, + __str__=lambda _: str(provider.prefix / "mock-env"), + ) mocker.patch.object( CondaDiagnosticProvider, @@ -278,7 +336,7 @@ def test_run(self, mocker, tmp_path, provider, env_exists): CondaDiagnosticProvider, "env_path", new_callable=mocker.PropertyMock, - return_value=env_path, + return_value=mock_env_path, ) run = mocker.patch.object( @@ -287,19 +345,9 @@ def test_run(self, mocker, tmp_path, provider, env_exists): create_autospec=True, ) - if not env_exists: - with pytest.raises( - RuntimeError, - match=(f"Conda environment for provider `{provider.slug}` not available at {env_path}."), - ): - provider.run(["mock-command"]) - else: - mocker.patch.object( - climate_ref_core.providers.os.environ, - "copy", - return_value={"existing_var": "existing_value"}, - ) - provider.env_vars = {"test_var": "test_value"} + provider.env_vars["test_var"] = "test_value" + + with raised: provider.run(["mock-command"]) run.assert_called_with( @@ -307,7 +355,7 @@ def test_run(self, mocker, tmp_path, provider, env_exists): f"{conda_exe}", "run", "--prefix", - f"{env_path}", + f"{mock_env_path}", "mock-command", ], check=True, diff --git a/packages/climate-ref/Dockerfile b/packages/climate-ref/Dockerfile index 62dcc1cd5..4c11259f1 100644 --- a/packages/climate-ref/Dockerfile +++ 
b/packages/climate-ref/Dockerfile @@ -2,7 +2,7 @@ # This docker container packages up the REF cli tool with the default set of diagnostic providers # used as part of the CMIP7 FastTrack process. -FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS build +FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim AS base RUN apt-get update && apt-get install -y --no-install-recommends \ git \ @@ -16,6 +16,7 @@ ENV UV_LINK_MODE=copy WORKDIR /app +FROM base AS build # Install the project's dependencies using the lockfile and settings RUN --mount=type=cache,target=/root/.cache/uv \ --mount=type=bind,source=uv.lock,target=uv.lock \ @@ -24,22 +25,18 @@ RUN --mount=type=cache,target=/root/.cache/uv \ # Then, add the rest of the project source code and install it # Installing separately from its dependencies allows optimal layer caching -ADD . /app +COPY . /app # Sync the project as non-editable installs RUN --mount=type=cache,target=/root/.cache/uv \ uv sync --frozen --no-editable --no-dev # Runtime container -FROM python:3.11-slim AS runtime +FROM base AS runtime LABEL maintainer="Jared Lewis " LABEL description="Base Docker image for the REF compute engine" -RUN apt-get update && apt-get install -y --no-install-recommends \ - git \ - && rm -rf /var/lib/apt/lists/* - # Create a non-root user RUN useradd -m -u 1000 app @@ -48,18 +45,30 @@ ENV PATH="/app/.venv/bin:${PATH}" ENV VIRTUAL_ENV=/app/.venv # Copy the installed packages from the build stage -COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /app/.venv/bin/ COPY --from=build --chown=app:app /app/.venv /app/.venv COPY --from=build --chown=app:app /app/scripts /app/scripts # Location of the REF configuration files ENV REF_CONFIGURATION=/ref +# Set matplotlib config directory to a location with write permissions +ENV MPLCONFIGDIR=$REF_CONFIGURATION/software/matplotlib + +# Set micromamba root to locations with write permissions +ENV MAMBA_ROOT_PREFIX=$REF_CONFIGURATION/software/conda +ENV 
CONDA_PKGS_DIRS=$REF_CONFIGURATION/software/conda/pkgs +ENV XDG_CACHE_HOME=$REF_CONFIGURATION/cache + # Create necessary directories with proper permissions -RUN mkdir -p /ref /app/cache && chown -R app:app /ref /app/cache +RUN install --owner=app --group=app -d /ref /app/cache && \ + install --mode=555 -d /home/app/.conda && \ + touch /home/app/.conda/environments.txt # conda tries to write to ~/.conda/environments.txt despite its root being set elsewhere. See: https://github.com/conda/conda/issues/8804 + +# Switch to non-root user -- use numeric ID for k8s systems that enforce runAsUser +USER 1000 -# Switch to non-root user -USER app +# Pre-cache matplotlib fonts and ilamb3 config +RUN /app/.venv/bin/python -c "import matplotlib; import ilamb3" # Run the REF CLI tool by default ENTRYPOINT ["/app/.venv/bin/ref"] diff --git a/packages/climate-ref/src/climate_ref/cli/providers.py b/packages/climate-ref/src/climate_ref/cli/providers.py index 94c696651..a08ba6d86 100644 --- a/packages/climate-ref/src/climate_ref/cli/providers.py +++ b/packages/climate-ref/src/climate_ref/cli/providers.py @@ -59,7 +59,7 @@ def create_env( Create a conda environment containing the provider software. If no provider is specified, all providers will be installed. - If the provider is up to date or does not use a virtual environment, it will be skipped. + If the provider is up to date or does not use a conda environment, it will be skipped. 
""" config = ctx.obj.config db = ctx.obj.database @@ -74,12 +74,12 @@ def create_env( raise typer.Exit(code=1) for provider_ in providers: - txt = f"virtual environment for provider {provider_.slug}" + txt = f"conda environment for provider {provider_.slug}" if isinstance(provider_, CondaDiagnosticProvider): logger.info(f"Creating {txt} in {provider_.env_path}") provider_.create_env() logger.info(f"Finished creating {txt}") else: - logger.info(f"Skipping creating {txt} because it does use virtual environments.") + logger.info(f"Skipping creating {txt} because it does not use conda environments.") list_(ctx) diff --git a/packages/climate-ref/src/climate_ref/executor/result_handling.py b/packages/climate-ref/src/climate_ref/executor/result_handling.py index 83f532db1..6322a4b0c 100644 --- a/packages/climate-ref/src/climate_ref/executor/result_handling.py +++ b/packages/climate-ref/src/climate_ref/executor/result_handling.py @@ -21,7 +21,6 @@ from climate_ref.models.execution import Execution, ExecutionOutput, ResultOutputType from climate_ref_core.diagnostics import ExecutionResult, ensure_relative_path from climate_ref_core.exceptions import ResultValidationError -from climate_ref_core.logging import EXECUTION_LOG_FILENAME from climate_ref_core.metric_values import SeriesMetricValue as TSeries from climate_ref_core.pycmec.controlled_vocabulary import CV from climate_ref_core.pycmec.metric import CMECMetric @@ -195,12 +194,12 @@ def handle_execution_result( The result of the diagnostic execution, either successful or failed """ # Always copy log data to the results directory - _copy_file_to_results( - config.paths.scratch, - config.paths.results, - execution.output_fragment, - EXECUTION_LOG_FILENAME, - ) + # _copy_file_to_results( + # config.paths.scratch, + # config.paths.results, + # execution.output_fragment, + # EXECUTION_LOG_FILENAME, + # ) if not result.successful or result.metric_bundle_filename is None: logger.error(f"{execution} failed") diff --git 
a/packages/climate-ref/src/climate_ref/testing.py b/packages/climate-ref/src/climate_ref/testing.py index 5d96cbf3c..4b1f03f5c 100644 --- a/packages/climate-ref/src/climate_ref/testing.py +++ b/packages/climate-ref/src/climate_ref/testing.py @@ -9,7 +9,6 @@ from climate_ref.config import Config from climate_ref.database import Database -from climate_ref.executor import handle_execution_result from climate_ref.models import Execution, ExecutionGroup from climate_ref_core.dataset_registry import dataset_registry_manager, fetch_all_files from climate_ref_core.diagnostics import Diagnostic, ExecutionResult @@ -112,5 +111,9 @@ def validate_result(diagnostic: Diagnostic, config: Config, result: ExecutionRes if not result.to_output_path("out.log").exists(): result.to_output_path("out.log").touch() + # Import late to avoid importing executors, + # some of which have on-import side effects, at package load time + from climate_ref.executor import handle_execution_result + # This checks if the bundles are valid handle_execution_result(config, database=database, execution=execution, result=result)