CI.yml: Set coverage config file to pyproject.toml
kvrigor committed Oct 4, 2024
1 parent ab102b9 commit 03f605d
Showing 1 changed file with 9 additions and 10 deletions.
.github/workflows/continuous-integration.yml (+9 -10)
@@ -30,6 +30,10 @@ jobs:
 
     name: ${{ matrix.os }} / Python ${{ matrix.python-version }}
 
+    env:
+      PSYDAC_MESH_DIR: ${{ github.workspace }}/mesh
+      OMP_NUM_THREADS: 2
+
     steps:
       - uses: actions/checkout@v4

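Context on the hunk above: variables declared in a job-level env: block are injected by the runner into the environment of every step in the job, which is what lets this commit delete the repeated export lines from each test step further down. A minimal sketch of the behavior, with an illustrative job and step that are not part of this commit:

jobs:
  example:
    runs-on: ubuntu-latest
    env:
      PSYDAC_MESH_DIR: ${{ github.workspace }}/mesh
      OMP_NUM_THREADS: 2
    steps:
      # No `export` needed: the runner places job-level env vars in each
      # step's environment before the shell starts.
      - name: Show inherited variables
        run: echo "PSYDAC_MESH_DIR=$PSYDAC_MESH_DIR OMP_NUM_THREADS=$OMP_NUM_THREADS"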
@@ -171,10 +175,11 @@ jobs:
       - name: Run single-process tests with Pytest
         working-directory: ./pytest
-        run: |
-          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
-          export OMP_NUM_THREADS=2
-          python -m pytest -n auto --cov psydac --cov-report xml:coverage.xml --pyargs psydac -m "not parallel and not petsc"
+        run: >-
+          python -m pytest -n auto --cov psydac
+          --cov-report xml:coverage.xml
+          --cov-config $GITHUB_WORKSPACE/pyproject.toml
+          --pyargs psydac -m "not parallel and not petsc"
 
       - name: Run Codacy coverage reporter
         uses: codacy/codacy-coverage-reporter-action@v1.3.0
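Why run: >- works here: >- is YAML's folded block scalar with strip chomping, so newlines inside the block are folded into spaces and the trailing newline is dropped, and the long pytest invocation reaches the shell as a single command line. The literal style | used before preserves newlines, which is why the old step could run several commands (the exports plus pytest) in one block. A minimal illustration, with hypothetical step names:

steps:
  # `>-` folds the newlines into spaces and strips the final newline,
  # so the shell receives one command line: `echo one two`.
  - name: Folded form
    run: >-
      echo one
      two
  # ...equivalent to writing the command on a single line:
  - name: Single-line form
    run: echo one two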
@@ -185,22 +190,16 @@ jobs:
       - name: Run MPI tests with Pytest
         working-directory: ./pytest
         run: |
-          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
-          export OMP_NUM_THREADS=2
           python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and not petsc"
 
       - name: Run single-process PETSc tests with Pytest
         working-directory: ./pytest
         run: |
-          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
-          export OMP_NUM_THREADS=2
           python -m pytest -n auto --pyargs psydac -m "not parallel and petsc"
 
       - name: Run MPI PETSc tests with Pytest
         working-directory: ./pytest
         run: |
-          export PSYDAC_MESH_DIR=$GITHUB_WORKSPACE/mesh
-          export OMP_NUM_THREADS=2
           python mpi_tester.py --mpirun="mpiexec -n 4 ${MPI_OPTS}" --pyargs psydac -m "parallel and petsc"
 
       - name: Remove test directory
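Why the new --cov-config flag: pytest-cov forwards the given path to coverage.py as its configuration file, so coverage settings are read from the [tool.coverage.*] tables in pyproject.toml. The absolute $GITHUB_WORKSPACE path matters because the tests run from ./pytest, where coverage.py's default config discovery would not find the repository's pyproject.toml. One hypothetical way to check which settings coverage.py resolves (this step is not part of the commit):

      # Hypothetical debugging step: from the repository root, where
      # pyproject.toml lives, print the configuration coverage.py
      # resolves, to confirm the [tool.coverage.*] tables are read.
      - name: Inspect resolved coverage config
        run: python -m coverage debug config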
