Skip to content

Run ferc_to_sqlite and pudl_etl independent of integration tests #988

Run ferc_to_sqlite and pudl_etl independent of integration tests

Run ferc_to_sqlite and pudl_etl independent of integration tests #988

Workflow file for this run

---
name: pytest
on:
pull_request:
types:
- created
- opened
- synchronize
- ready_for_review
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}-${{ github.event_name }}
cancel-in-progress: true
env:
PUDL_OUTPUT: /home/runner/pudl-work/output/
PUDL_INPUT: /home/runner/pudl-work/input/
DAGSTER_HOME: /home/runner/pudl-work/dagster_home/
ETL_CONFIG: src/pudl/package_data/settings/etl_fast.yml
ETL_COMMANDLINE_OPTIONS: --gcs-cache-path=gs://zenodo-cache.catalyst.coop
jobs:
ci-docs:
runs-on: ubuntu-latest
strategy:
fail-fast: false
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Install conda-lock environment with micromamba
uses: mamba-org/setup-micromamba@v1
with:
environment-file: environments/conda-lock.yml
environment-name: pudl-dev
cache-environment: true
- name: Log environment details
run: |
conda info
conda list
conda config --show-sources
conda config --show
printenv | sort
- name: Make input, output and dagster dirs
run: mkdir -p ${{ env.PUDL_OUTPUT }} ${{ env.PUDL_INPUT}} ${{ env.DAGSTER_HOME }}
- name: Lint and build PUDL documentation with Sphinx
run: |
pip install --no-deps --editable .
make docs-build
- name: Upload coverage
uses: actions/upload-artifact@v4
with:
name: coverage-docs
path: coverage.xml
ci-unit:
runs-on: ubuntu-latest
strategy:
fail-fast: false
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Install conda-lock environment with micromamba
uses: mamba-org/setup-micromamba@v1
with:
environment-file: environments/conda-lock.yml
environment-name: pudl-dev
cache-environment: true
- name: Log environment details
run: |
conda info
conda list
conda config --show-sources
conda config --show
printenv | sort
- name: Make input, output and dagster dirs
run: mkdir -p ${{ env.PUDL_OUTPUT }} ${{ env.PUDL_INPUT}} ${{ env.DAGSTER_HOME }}
- name: Log SQLite3 version
run: |
which sqlite3
sqlite3 --version
- name: Run PUDL unit tests and collect test coverage
run: |
pip install --no-deps --editable .
make pytest-unit
- name: Upload coverage
uses: actions/upload-artifact@v4
with:
name: coverage-unit
path: coverage.xml
ci-integration:
needs:
- ci-unit
runs-on: ubuntu-22.04-4core
if: github.event.pull_request.draft == false
permissions:
contents: read
id-token: write
defaults:
run:
shell: bash -l {0}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 2
- name: Install conda-lock environment with micromamba
uses: mamba-org/setup-micromamba@v1
with:
environment-file: environments/conda-lock.yml
environment-name: pudl-dev
cache-environment: true
- name: Install PUDL and its dependencies
run: pip install --no-deps --no-cache-dir .
- name: Log environment details
run: |
conda info
conda list
conda config --show-sources
conda config --show
printenv | sort
- name: Log SQLite3 version
run: |
which sqlite3
sqlite3 --version
- name: Compile Zenodo datastore DOIs for cache invalidation
run:
grep -e '.*10\.\(5281\|5072\)/zenodo\..*' src/pudl/workspace/datastore.py
| sed -e 's/",*$//g' | sed -e 's/^.*"//g' | sort > datastore-dois.txt
- name: Restore Zenodo datastore from cache if possible
uses: actions/cache@v3
id: cache-zenodo-datastore
with:
path: ${{ env.PUDL_INPUT }}
key: zenodo-datastore-${{ hashFiles('datastore-dois.txt') }}
- name: Make input, output and dagster dirs
run: mkdir -p ${{ env.PUDL_OUTPUT }} ${{ env.PUDL_INPUT}} ${{ env.DAGSTER_HOME }}
- name: List workspace contents
run: find /home/runner/pudl-work
- name: Set default GCP credentials
id: gcloud-auth
continue-on-error: true
uses: "google-github-actions/auth@v2"
with:
workload_identity_provider: "projects/345950277072/locations/global/workloadIdentityPools/gh-actions-pool/providers/gh-actions-provider"
service_account: "tox-pytest-github-action@catalyst-cooperative-pudl.iam.gserviceaccount.com"
- name: Run ferc_to_sqlite
env:
COVERAGE_FILE: .coverage.ferc_to_sqlite
run: |
coverage run --concurrency=multiprocessing \
src/pudl/ferc_to_sqlite/cli.py --clobber ${{ env.ETL_COMMANDLINE_OPTIONS }} ${{ env.ETL_CONFIG }}
- name: Run pudl_etl
env:
COVERAGE_FILE: .coverage.pudl_etl
run: |
alembic upgrade head
coverage run --concurrency=multiprocessing \
src/pudl/etl/cli.py ${{ env.ETL_COMMANDLINE_OPTIONS }} ${{ env.ETL_CONFIG }}
- name: Run integration tests
env:
COVERAGE_FILE: .coverage.pytest
run: |
coverage run --concurrency=multiprocessing \
-m pytest -n auto --live-dbs test/integration
- name: Checksum coverage files
run: ls .coverage* | xargs md5sum | sort
- name: Generate coverage
run: |
coverage --version
coverage combine
coverage xml
coverage report
- name: Upload coverage
uses: actions/upload-artifact@v4
with:
name: coverage-integration
path: coverage.xml
ci-coverage:
name: Upload coverage to CodeCov
runs-on: ubuntu-latest
needs:
- ci-docs
- ci-unit
- ci-integration
steps:
- uses: actions/checkout@v4
- name: Download coverage
id: download-coverage
uses: actions/download-artifact@v4
with:
path: coverage
- name: List downloaded files
run: find -type f
- name: Upload test coverage report to CodeCov
uses: codecov/codecov-action@v3
with:
directory: coverage
fail_ci_if_error: true