diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md
index a90c06f9a9..75da414db6 100644
--- a/.github/CONTRIBUTING.md
+++ b/.github/CONTRIBUTING.md
@@ -92,7 +92,7 @@ pre-commit run --all
 ## API Documentation
 
 We aim to write function docstrings according to the [Google Python style-guide](https://github.com/google/styleguide/blob/gh-pages/pyguide.md#38-comments-and-docstrings). These are used to automatically generate package documentation on the nf-core website using Sphinx.
-You can find this documentation here: [https://nf-co.re/tools-docs/](https://nf-co.re/tools-docs/)
+You can find this documentation here: [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/)
 
 If you would like to test the documentation, you can install Sphinx locally by following Sphinx's [installation instruction](https://www.sphinx-doc.org/en/master/usage/installation.html).
 Once done, you can run `make clean` and then `make html` in the `docs/api` directory of `nf-core tools`.
diff --git a/.github/renovate.json5 b/.github/renovate.json5
new file mode 100644
index 0000000000..f9b377c615
--- /dev/null
+++ b/.github/renovate.json5
@@ -0,0 +1,5 @@
+{
+  $schema: "https://docs.renovatebot.com/renovate-schema.json",
+  extends: ["github>nf-core/ops//.github/renovate/default.json5"],
+  baseBranches: ["dev"],
+}
diff --git a/.github/workflows/create-lint-wf.yml b/.github/workflows/create-lint-wf.yml
index 8f6836f309..2a692da799 100644
--- a/.github/workflows/create-lint-wf.yml
+++ b/.github/workflows/create-lint-wf.yml
@@ -17,7 +17,7 @@ env:
 
 jobs:
   MakeTestWorkflow:
-    runs-on: ubuntu-latest
+    runs-on: self-hosted
     env:
       NXF_ANSI_LOG: false
    strategy:
@@ -27,14 +27,14 @@ jobs:
           - "latest-everything"
     steps:
       # Get the repo code
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out source-code repository

       # Set up nf-core/tools
-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
@@ -48,84 +48,101 @@ jobs:
           version: ${{ matrix.NXF_VER }}

       # Install the Prettier linting tools
-      - uses: actions/setup-node@v3
-
-      - name: Install Prettier
-        run: npm install -g prettier
+      - uses: actions/setup-node@v4
+        with:
+          node-version: "20"

-      # Install the editorconfig linting tools
-      - name: Install editorconfig-checker
-        run: npm install -g editorconfig-checker
+      - name: Install Prettier and editorconfig-checker
+        run: npm install -g prettier editorconfig-checker

       # Build a pipeline from the template
       - name: nf-core create
-        run: nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain
+        run: |
+          mkdir create-lint-wf && cd create-lint-wf
+          export NXF_WORK=$(pwd)
+          nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain

       # Try syncing it before we change anything
       - name: nf-core sync
         run: nf-core --log-file log.txt sync --dir nf-core-testpipeline/
+        working-directory: create-lint-wf

       # Build a module from the template
       - name: nf-core modules create
         run: nf-core --log-file log.txt modules create bpipe --dir nf-core-testpipeline --author @nf-core-bot --label process_low --meta
+        working-directory: create-lint-wf

       # Run code style linting
       - name: Run Prettier --check
-        run: prettier --check nf-core-testpipeline
+        run: prettier --check create-lint-wf/nf-core-testpipeline

       - name: Run ECLint check
         run: editorconfig-checker -exclude README.md $(find nf-core-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
+        working-directory: create-lint-wf

       # Update modules to the latest version
       - name: nf-core modules update
         run: nf-core --log-file log.txt modules update --dir nf-core-testpipeline --all --no-preview
+        working-directory: create-lint-wf

       # Remove TODO statements
       - name: remove TODO
         run: find nf-core-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \;
+        working-directory: create-lint-wf

       # Replace zenodo.XXXXXX to pass readme linting
       - name: replace zenodo.XXXXXX
         run: find nf-core-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \;
+        working-directory: create-lint-wf

       # Run nf-core linting
       - name: nf-core lint
         run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned
+        working-directory: create-lint-wf

       # Run the other nf-core commands
       - name: nf-core list
         run: nf-core --log-file log.txt list
+        working-directory: create-lint-wf

       # - name: nf-core licences
       #   run: nf-core --log-file log.txt licences nf-core-testpipeline

       - name: nf-core schema
         run: nf-core --log-file log.txt schema build --dir nf-core-testpipeline/ --no-prompts
+        working-directory: create-lint-wf

       - name: nf-core bump-version
         run: nf-core --log-file log.txt bump-version --dir nf-core-testpipeline/ 1.1
+        working-directory: create-lint-wf

       - name: nf-core lint in release mode
         run: nf-core --log-file log.txt --hide-progress lint --dir nf-core-testpipeline --fail-ignored --fail-warned --release
+        working-directory: create-lint-wf

       - name: nf-core modules install
         run: nf-core --log-file log.txt modules install fastqc --dir nf-core-testpipeline/ --force
+        working-directory: create-lint-wf

       - name: nf-core modules install gitlab
         run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git --branch branch-tester install fastp --dir nf-core-testpipeline/
+        working-directory: create-lint-wf

       - name: nf-core modules list local
         run: nf-core --log-file log.txt modules list local --dir nf-core-testpipeline/
+        working-directory: create-lint-wf

       - name: nf-core modules list remote
         run: nf-core --log-file log.txt modules list remote
+        working-directory: create-lint-wf

       - name: nf-core modules list remote gitlab
         run: nf-core --log-file log.txt modules --git-remote https://gitlab.com/nf-core/modules-test.git list remote
+        working-directory: create-lint-wf

       - name: Upload log file artifact
         if: ${{ always() }}
         uses: actions/upload-artifact@v3
         with:
           name: nf-core-log-file
-          path: log.txt
+          path: create-lint-wf/log.txt
diff --git a/.github/workflows/create-test-lint-wf-template.yml b/.github/workflows/create-test-lint-wf-template.yml
index 2689805dd1..b4d8aead9f 100644
--- a/.github/workflows/create-test-lint-wf-template.yml
+++ b/.github/workflows/create-test-lint-wf-template.yml
@@ -33,13 +33,13 @@ jobs:
           - "template_skip_nf_core_configs.yml"

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out source-code repository

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
@@ -52,7 +52,9 @@ jobs:
           version: latest-everything

       # Install the Prettier linting tools
-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
+        with:
+          node-version: "20"

       - name: Install Prettier
         run: npm install -g prettier
@@ -64,75 +66,86 @@ jobs:
       # Create template files
       - name: Create template skip all (except github)
         run: |
-          printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > template_skip_all.yml
+          mkdir create-test-lint-wf
+          export NXF_WORK=$(pwd)
+          printf "prefix: my-prefix\nskip: ['ci', 'github_badges', 'igenomes', 'nf_core_configs']" > create-test-lint-wf/template_skip_all.yml

       - name: Create template skip github_badges
         run: |
-          printf "prefix: my-prefix\nskip: github_badges" > template_skip_github_badges.yml
+          printf "prefix: my-prefix\nskip: github_badges" > create-test-lint-wf/template_skip_github_badges.yml

       - name: Create template skip igenomes
         run: |
-          printf "prefix: my-prefix\nskip: igenomes" > template_skip_igenomes.yml
+          printf "prefix: my-prefix\nskip: igenomes" > create-test-lint-wf/template_skip_igenomes.yml

       - name: Create template skip ci
         run: |
-          printf "prefix: my-prefix\nskip: ci" > template_skip_ci.yml
+          printf "prefix: my-prefix\nskip: ci" > create-test-lint-wf/template_skip_ci.yml

       - name: Create template skip nf_core_configs
         run: |
-          printf "prefix: my-prefix\nskip: nf_core_configs" > template_skip_nf_core_configs.yml
+          printf "prefix: my-prefix\nskip: nf_core_configs" > create-test-lint-wf/template_skip_nf_core_configs.yml

       # Create a pipeline from the template
       - name: create a pipeline from the template ${{ matrix.TEMPLATE }}
         run: |
+          cd create-test-lint-wf
           nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --template-yaml ${{ matrix.TEMPLATE }}

       - name: run the pipeline
         run: |
+          cd create-test-lint-wf
           nextflow run my-prefix-testpipeline -profile test,docker --outdir ./results

       # Remove results folder before linting
       - name: remove results folder
         run: |
-          rm -rf ./results
+          rm -rf create-test-lint-wf/results

       # Try syncing it before we change anything
       - name: nf-core sync
-        run: nf-core --log-file log.txt sync --dir my-prefix-testpipeline/
+        run: nf-core --log-file log.txt sync --dir create-test-lint-wf/my-prefix-testpipeline/

       # Run code style linting
       - name: Run Prettier --check
-        run: prettier --check my-prefix-testpipeline
+        run: prettier --check create-test-lint-wf/my-prefix-testpipeline

       - name: Run ECLint check
         run: editorconfig-checker -exclude README.md $(find my-prefix-testpipeline/.* -type f | grep -v '.git\|.py\|md\|json\|yml\|yaml\|html\|css\|work\|.nextflow\|build\|nf_core.egg-info\|log.txt\|Makefile')
+        working-directory: create-test-lint-wf

       # Remove TODO statements
       - name: remove TODO
         run: find my-prefix-testpipeline -type f -exec sed -i '/TODO nf-core:/d' {} \;
+        working-directory: create-test-lint-wf

       # Replace zenodo.XXXXXX to pass readme linting
       - name: replace zenodo.XXXXXX
         run: find my-prefix-testpipeline -type f -exec sed -i 's/zenodo.XXXXXX/zenodo.123456/g' {} \;
+        working-directory: create-test-lint-wf

       # Run nf-core linting
       - name: nf-core lint
         run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned
+        working-directory: create-test-lint-wf

       # Run bump-version
       - name: nf-core bump-version
         run: nf-core --log-file log.txt bump-version --dir my-prefix-testpipeline/ 1.1
+        working-directory: create-test-lint-wf

       # Run nf-core linting in release mode
       - name: nf-core lint in release mode
         run: nf-core --log-file log.txt --hide-progress lint --dir my-prefix-testpipeline --fail-warned --release
+        working-directory: create-test-lint-wf

       - name: Tar files
         run: tar -cvf artifact_files.tar log.txt template_skip*.yml
+        working-directory: create-test-lint-wf

       - name: Upload log file artifact
         if: ${{ always() }}
         uses: actions/upload-artifact@v3
         with:
           name: nf-core-log-file
-          path: artifact_files.tar
+          path: create-test-lint-wf/artifact_files.tar
diff --git a/.github/workflows/create-test-wf.yml b/.github/workflows/create-test-wf.yml
index 5faa59772c..e4779d0684 100644
--- a/.github/workflows/create-test-wf.yml
+++ b/.github/workflows/create-test-wf.yml
@@ -26,13 +26,13 @@ jobs:
           - "23.04.0"
           - "latest-everything"
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out source-code repository

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
@@ -46,6 +46,8 @@ jobs:

       - name: Run nf-core/tools
         run: |
+          mkdir create-test-wf && cd create-test-wf
+          export NXF_WORK=$(pwd)
           nf-core --log-file log.txt create -n testpipeline -d "This pipeline is for testing" -a "Testing McTestface" --plain
           nextflow run nf-core-testpipeline -profile test,docker --outdir ./results

@@ -54,4 +56,4 @@ jobs:
         uses: actions/upload-artifact@v3
         with:
           name: nf-core-log-file
-          path: log.txt
+          path: create-test-wf/log.txt
diff --git a/.github/workflows/deploy-pypi.yml b/.github/workflows/deploy-pypi.yml
index 1f539fe09a..b847df9218 100644
--- a/.github/workflows/deploy-pypi.yml
+++ b/.github/workflows/deploy-pypi.yml
@@ -13,13 +13,13 @@ jobs:
     runs-on: ubuntu-latest

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out source-code repository

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
diff --git a/.github/workflows/fix-linting.yml b/.github/workflows/fix-linting.yml
index ed2314046a..d846151205 100644
--- a/.github/workflows/fix-linting.yml
+++ b/.github/workflows/fix-linting.yml
@@ -13,7 +13,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       # Use the @nf-core-bot token to check out so we can push later
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         with:
           token: ${{ secrets.nf_core_bot_auth_token }}

@@ -24,7 +24,9 @@ jobs:
         env:
           GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }}

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
+        with:
+          node-version: "20"

       - name: Install Prettier
         run: npm install -g prettier @prettier/plugin-php

@@ -38,10 +40,10 @@ jobs:
           # Override to remove the default --check flag so that we make changes
           options: "--color"

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12
       - name: python-isort
         uses: isort/isort-action@v1.0.0
         with:
diff --git a/.github/workflows/lint-code.yml b/.github/workflows/lint-code.yml
index edff9a8c82..168fac653c 100644
--- a/.github/workflows/lint-code.yml
+++ b/.github/workflows/lint-code.yml
@@ -14,11 +14,13 @@ concurrency:

 jobs:
   EditorConfig:
-    runs-on: ubuntu-latest
+    runs-on: ["self-hosted"]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
+        with:
+          node-version: "20"

       - name: Install editorconfig-checker
         run: npm install -g editorconfig-checker

@@ -28,11 +30,13 @@ jobs:
         run: editorconfig-checker -exclude README.md $(git ls-files | grep -v 'test\|.py\|md\|json\|yml\|yaml\|html\|css\|Makefile')

   Prettier:
-    runs-on: ubuntu-latest
+    runs-on: ["self-hosted"]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
+        with:
+          node-version: "20"

       - name: Install Prettier
         run: npm install -g prettier

@@ -41,9 +45,9 @@ jobs:
         run: prettier --check ${GITHUB_WORKSPACE}

   PythonBlack:
-    runs-on: ubuntu-latest
+    runs-on: ["self-hosted"]
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Check code lints with Black
         uses: psf/black@stable

@@ -71,15 +75,15 @@ jobs:
           allow-repeats: false

   isort:
-    runs-on: ubuntu-latest
+    runs-on: ["self-hosted"]
     steps:
       - name: Check out source-code repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12
       - name: python-isort
         uses: isort/isort-action@v1.1.0
         with:
@@ -87,12 +91,12 @@ jobs:
           requirementsFiles: "requirements.txt requirements-dev.txt"

   static-type-check:
-    runs-on: ubuntu-latest
+    runs-on: ["self-hosted"]
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v3
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12
           cache: "pip"

       - name: Install dependencies
diff --git a/.github/workflows/push_dockerhub_dev.yml b/.github/workflows/push_dockerhub_dev.yml
index dea28cdd35..1230bfc9d3 100644
--- a/.github/workflows/push_dockerhub_dev.yml
+++ b/.github/workflows/push_dockerhub_dev.yml
@@ -13,7 +13,7 @@ concurrency:
 jobs:
   push_dockerhub:
     name: Push new Docker image to Docker Hub (dev)
-    runs-on: ubuntu-latest
+    runs-on: self-hosted
     # Only run for the nf-core repo, for releases and merged PRs
     if: ${{ github.repository == 'nf-core/tools' }}
     env:
@@ -23,7 +23,7 @@ jobs:
       fail-fast: false
     steps:
       - name: Check out code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Build nfcore/tools:dev docker image
         run: docker build --no-cache . -t nfcore/tools:dev
diff --git a/.github/workflows/push_dockerhub_release.yml b/.github/workflows/push_dockerhub_release.yml
index 857b241022..49ce17dd84 100644
--- a/.github/workflows/push_dockerhub_release.yml
+++ b/.github/workflows/push_dockerhub_release.yml
@@ -23,7 +23,7 @@ jobs:
       fail-fast: false
     steps:
       - name: Check out code
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Build nfcore/tools:latest docker image
         run: docker build --no-cache . -t nfcore/tools:latest
diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml
index 4d9881d6bb..46ebe8e57d 100644
--- a/.github/workflows/pytest.yml
+++ b/.github/workflows/pytest.yml
@@ -24,37 +24,53 @@ env:
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

 jobs:
-  pytest:
-    runs-on: ${{ matrix.runner }}
+  setup:
+    runs-on: ["ubuntu-latest"]
     strategy:
       matrix:
-        python-version: ["3.8", "3.11"]
+        python-version: ["3.8", "3.12"]
         runner: ["ubuntu-latest"]
         include:
-          - runner: "ubuntu-20.04"
-            python-version: "3.8"
+          - python-version: "3.8"
+            runner: "ubuntu-20.04"

     steps:
-      - uses: actions/checkout@v3
+      - name: Check conditions
+        id: conditions
+        run: echo "run-tests=${{ github.ref == 'refs/heads/master' || (matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8') }}" >> "$GITHUB_OUTPUT"
+
+    outputs:
+      python-version: ${{ matrix.python-version }}
+      runner: ${{ matrix.runner }}
+      run-tests: ${{ steps.conditions.outputs.run-tests }}
+
+  test:
+    name: Test with Python ${{ needs.setup.outputs.python-version }} on ${{ needs.setup.outputs.runner }}
+    needs: setup
+    if: ${{ needs.setup.outputs.run-tests }}
+    runs-on: ${{ needs.setup.outputs.runner }}
+    steps:
+      - uses: actions/checkout@v2
         name: Check out source-code repository

-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+      - name: Set up Python ${{ needs.setup.outputs.python-version }}
+        uses: actions/setup-python@v2
         with:
-          python-version: ${{ matrix.python-version }}
+          python-version: ${{ needs.setup.outputs.python-version }}
           cache: "pip"
+
       - name: Install dependencies
         run: |
           python -m pip install --upgrade pip -r requirements-dev.txt
           pip install -e .

       - name: Downgrade git to the Ubuntu official repository's version
-        if: ${{ matrix.runner == 'ubuntu-20.04' && matrix.python-version == '3.8' }}
+        if: ${{ needs.setup.outputs.runner == 'ubuntu-20.04' && needs.setup.outputs.python-version == '3.8' }}
         run: |
           sudo apt update
-          sudo apt remove git git-man
+          sudo apt remove -y git git-man
           sudo add-apt-repository --remove ppa:git-core/ppa
-          sudo apt install git
+          sudo apt install -y git

       - name: Get current date
         id: date
         run: echo "date=$(date +'%Y-%m')" >> $GITHUB_ENV
@@ -71,7 +87,7 @@ jobs:
           path: |
             /usr/local/bin/nf-test
             /home/runner/.nf-test/nf-test.jar
-          key: ${{ runner.os }}-${{ env.NFTEST_VER }}-nftest
+          key: ${{ runner.os }}-nftest-${{ env.date }}

       - name: Install nf-test
         if: steps.cache-software.outputs.cache-hit != 'true'
diff --git a/.github/workflows/rich-codex.yml b/.github/workflows/rich-codex.yml
index 54aaf240df..8368255390 100644
--- a/.github/workflows/rich-codex.yml
+++ b/.github/workflows/rich-codex.yml
@@ -6,7 +6,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out the repo
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
diff --git a/.github/workflows/sync.yml b/.github/workflows/sync.yml
index fbbdacc8ab..a5cb451dc7 100644
--- a/.github/workflows/sync.yml
+++ b/.github/workflows/sync.yml
@@ -31,15 +31,15 @@ jobs:
   sync:
     needs: get-pipelines
-    runs-on: ubuntu-latest
+    runs-on: self-hosted
     strategy:
       matrix: ${{fromJson(needs.get-pipelines.outputs.matrix)}}
       fail-fast: false
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out nf-core/tools

-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
         name: Check out nf-core/${{ matrix.pipeline }}
         with:
           repository: nf-core/${{ matrix.pipeline }}
@@ -48,10 +48,10 @@ jobs:
           path: nf-core/${{ matrix.pipeline }}
           fetch-depth: "0"

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
diff --git a/.github/workflows/tools-api-docs-dev.yml b/.github/workflows/tools-api-docs-dev.yml
index add939aba1..f6106bd8b5 100644
--- a/.github/workflows/tools-api-docs-dev.yml
+++ b/.github/workflows/tools-api-docs-dev.yml
@@ -20,16 +20,16 @@ concurrency:
 jobs:
   api-docs:
     name: Build & push Sphinx API docs
-    runs-on: ubuntu-latest
+    runs-on: self-hosted

     steps:
       - name: Check out source-code repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
diff --git a/.github/workflows/tools-api-docs-release.yml b/.github/workflows/tools-api-docs-release.yml
index f049d74ca0..412784c233 100644
--- a/.github/workflows/tools-api-docs-release.yml
+++ b/.github/workflows/tools-api-docs-release.yml
@@ -19,12 +19,12 @@ jobs:
           - ${{ github.event.release.tag_name }}
     steps:
       - name: Check out source-code repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

-      - name: Set up Python 3.11
+      - name: Set up Python 3.12
         uses: actions/setup-python@v4
         with:
-          python-version: 3.11
+          python-version: 3.12

       - name: Install python dependencies
         run: |
diff --git a/.gitpod.yml b/.gitpod.yml
index 1cc63b197f..3c8b6b5303 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -6,6 +6,9 @@ tasks:
       python -m pip install -r requirements-dev.txt
       pre-commit install --install-hooks
       nextflow self-update
+  - name: unset JAVA_TOOL_OPTIONS
+    command: |
+      unset JAVA_TOOL_OPTIONS

 vscode:
   extensions: # based on nf-core.nf-core-extensionpack
     - codezombiech.gitignore # Language support for .gitignore files
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index b7aeeb5bc9..ad23a3c895 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -11,3 +11,18 @@ repos:
     rev: "v2.7.1"
     hooks:
       - id: prettier
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v3.15.0
+    hooks:
+      - id: pyupgrade
+        args: [--py38-plus]
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: "v1.7.1" # Use the sha / tag you want to point at
+    hooks:
+      - id: mypy
+        additional_dependencies:
+          - types-PyYAML
+          - types-requests
+          - types-jsonschema
+          - types-Markdown
+          - types-setuptools
diff --git a/.prettierignore b/.prettierignore
index 778fb4fb2c..344cafca6e 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -3,6 +3,6 @@ adaptivecard.json
 slackreport.json
 docs/api/_build
 testing
-nf_core/module-template/modules/meta.yml
-nf_core/module-template/modules/tests/tags.yml
-nf_core/subworkflow-template/subworkflows/tests/tags.yml
+nf_core/module-template/meta.yml
+nf_core/module-template/tests/tags.yml
+nf_core/subworkflow-template/tests/tags.yml
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b8fcce59fb..b7365fa3f1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,28 +7,51 @@

 - Fix writing files to a remote outdir in the NfcoreTemplate helper functions ([#2465](https://github.com/nf-core/tools/pull/2465))
 - Fancier syntax highlighting for example samplesheets in the usage.md template ([#2503](https://github.com/nf-core/tools/pull/2503))
 - Use closure for multiqc ext.args ([#2509](https://github.com/nf-core/tools/pull/2509))
+- Fix how the modules template references the conda environment file ([#2540](https://github.com/nf-core/tools/pull/2540))
+- Unset env variable JAVA_TOOL_OPTIONS in gitpod ([#2569](https://github.com/nf-core/tools/pull/2569))
+
+### Download
+
+- Add `docker://` prefix for absolute container URIs as well ([#2576](https://github.com/nf-core/tools/pull/2576)).
+- Bugfix for AttributeError: `ContainerError` object has no attribute `absoluteURI` ([#2543](https://github.com/nf-core/tools/pull/2543)).

 ### Linting

 - Fix incorrectly failing linting if 'modules' was not found in meta.yml ([#2447](https://github.com/nf-core/tools/pull/2447))
 - Correctly pass subworkflow linting test if `COMPONENT.out.versions` is used in the script ([#2448](https://github.com/nf-core/tools/pull/2448))
+- Add pyupgrade to pre-commit config and dev requirements as mentioned in [#2200](https://github.com/nf-core/tools/issues/2200)
 - Check for spaces in modules container URLs ([#2452](https://github.com/nf-core/tools/issues/2452))
+- Correctly ignore `timeline.enabled`, `report.enabled`, `trace.enabled`, `dag.enabled` variables when linting a pipeline. ([#2507](https://github.com/nf-core/tools/pull/2507))
+- Lint that nf-test main.nf.test tags include all used components in chained tests ([#2572](https://github.com/nf-core/tools/pull/2572))
+- Don't fail linting if md5sums for empty files are found in a stub test ([#2571](https://github.com/nf-core/tools/pull/2571))

 ### Modules

 - Added stub test creation to `create_test_yml` ([#2476](https://github.com/nf-core/tools/pull/2476))
 - Replace ModulePatch by ComponentPatch ([#2482](https://github.com/nf-core/tools/pull/2482))
 - Fixed `nf-core modules lint` to work with new module structure for nf-test ([#2494](https://github.com/nf-core/tools/pull/2494))
+- Add option `--migrate-pytest` to create a module with nf-test taking into account an existing module ([#2549](https://github.com/nf-core/tools/pull/2549))
+- When installing modules and subworkflows, automatically create the `./modules` directory if it doesn't exist ([#2563](https://github.com/nf-core/tools/issues/2563))
+- When `.nf-core.yml` is not found create it in the current directory instead of the root filesystem ([#2237](https://github.com/nf-core/tools/issues/2237))
+- Modules `--migrate-pytest` copies template scripts ([#2568](https://github.com/nf-core/tools/pull/2568))

 ### Subworkflows

 - Added stub test creation to `create_test_yml` ([#2476](https://github.com/nf-core/tools/pull/2476))
 - Fixed `nf-core subworkflows lint` to work with new module structure for nf-test ([#2494](https://github.com/nf-core/tools/pull/2494))
+- Add option `--migrate-pytest` to create a subworkflow with nf-test taking into account an existing subworkflow ([#2549](https://github.com/nf-core/tools/pull/2549))

 ### General

+- Update `schema build` functionality to automatically update defaults which have changed in the `nextflow.config` ([#2479](https://github.com/nf-core/tools/pull/2479))
 - Change testing framework for modules and subworkflows from pytest to nf-test ([#2490](https://github.com/nf-core/tools/pull/2490))
 - `bump_version` keeps now the indentation level of the updated version entries ([#2514](https://github.com/nf-core/tools/pull/2514))
+- Run tests with Python 3.12 ([#2522](https://github.com/nf-core/tools/pull/2522)).
+- Add mypy to pre-commit config for the tools repo ([#2545](https://github.com/nf-core/tools/pull/2545))
+- Use Path objects for ComponentCreate and update the structure of components templates ([#2551](https://github.com/nf-core/tools/pull/2551)).
+- GitPod base image: swap tool installation back to `conda` from `mamba` ([#2566](https://github.com/nf-core/tools/pull/2566)).
+- Sort the `installed_by` list in `modules.json` ([#2570](https://github.com/nf-core/tools/pull/2570)).
+- Unset env variable JAVA_TOOL_OPTIONS in gitpod ([#2569](https://github.com/nf-core/tools/pull/2569))

 # [v2.10 - Nickel Ostrich](https://github.com/nf-core/tools/releases/tag/2.10) - [2023-09-25]
diff --git a/README.md b/README.md
index 8e6b52d4c2..6a10e1f382 100644
--- a/README.md
+++ b/README.md
@@ -57,7 +57,7 @@ A python package with helper tools for the nf-core community.
 - [Citation](#citation)

 The nf-core tools package is written in Python and can be imported and used within other packages.
-For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools-docs/).
+For documentation of the internal Python functions, please refer to the [Tools Python API docs](https://nf-co.re/tools/docs/).

 ## Installation

@@ -76,7 +76,7 @@ conda install nf-core
 Alternatively, you can create a new environment with both nf-core/tools and nextflow:

 ```bash
-conda create --name nf-core python=3.11 nf-core nextflow
+conda create --name nf-core python=3.12 nf-core nextflow
 conda activate nf-core
 ```

@@ -186,7 +186,7 @@ for wf in wfs.remote_workflows:
     print(wf.full_name)
 ```

-Please see [https://nf-co.re/tools-docs/](https://nf-co.re/tools-docs/) for the function documentation.
+Please see [https://nf-co.re/tools/docs/](https://nf-co.re/tools/docs/) for the function documentation.

 ### Automatic version check

@@ -433,7 +433,7 @@ If the download speeds are much slower than your internet connection is capable
 Subsequently, the `*.git` folder can be moved to it's final destination and linked with a pipeline in _Tower_ using the `file:/` prefix.

 :::tip
-Also without access to Tower, pipelines downloaded with the `--tower` flag can be run: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified.
+Also without access to Tower, pipelines downloaded with the `--tower` flag can be run if the _absolute_ path is specified: `nextflow run -r 2.5 file:/path/to/pipelinedownload.git`. Downloads in this format allow you to include multiple revisions of a pipeline in a single file, but require that the revision (e.g. `-r 2.5`) is always explicitly specified.
 :::

 ## Pipeline software licences

@@ -533,7 +533,7 @@ Each test result name on the left is a terminal hyperlink.
 In most terminals you can <kbd>ctrl</kbd> + click (<kbd>cmd</kbd> + click) these links to open documentation specific to this test in your browser.

-Alternatively visit <https://nf-co.re/tools-docs/> and find your test to read more.
+Alternatively visit <https://nf-co.re/tools/docs/> and find your test to read more.

 ### Linting config
diff --git a/docs/api/_src/conf.py b/docs/api/_src/conf.py
index 4d8ae661d5..27eaf9bcb3 100644
--- a/docs/api/_src/conf.py
+++ b/docs/api/_src/conf.py
@@ -14,6 +14,7 @@
 #
 import os
 import sys
+from typing import Dict

 sys.path.insert(0, os.path.abspath("../../../nf_core"))
 import nf_core
@@ -58,7 +59,7 @@
 #
 # This is also used if you do content translation via gettext catalogs.
 # Usually you set "language" from the command line for these cases.
-language = None
+language: str = "en"

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
@@ -113,7 +114,7 @@

 # -- Options for LaTeX output ------------------------------------------------

-latex_elements = {
+latex_elements: Dict[str, str] = {
     # The paper size ('letterpaper' or 'a4paper').
     #
     # 'papersize': 'letterpaper',
diff --git a/mypy.ini b/mypy.ini
new file mode 100644
index 0000000000..c48aa5884b
--- /dev/null
+++ b/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+warn_unused_configs = True
+ignore_missing_imports = true
diff --git a/nf_core/__main__.py b/nf_core/__main__.py
index 232f1bf116..8cfacf7399 100644
--- a/nf_core/__main__.py
+++ b/nf_core/__main__.py
@@ -151,7 +151,7 @@ def nf_core_cli(ctx, verbose, hide_progress, log_file):

 # nf-core list
-@nf_core_cli.command()
+@nf_core_cli.command("list")
 @click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
 @click.option(
     "-s",
     "--sort",
 )
 @click.option("--json", is_flag=True, default=False, help="Print full output as JSON")
 @click.option("--show-archived", is_flag=True, default=False, help="Print archived workflows")
-def list(keywords, sort, json, show_archived):
+def list_pipelines(keywords, sort, json, show_archived):
     """
     List available nf-core pipelines with local info.

@@ -448,7 +448,7 @@ def lint(ctx, dir, release, fix, key, show_passed, fail_ignored, fail_warned, ma
     Runs a large number of automated tests to ensure that the supplied pipeline
     meets the nf-core guidelines. Documentation of all lint tests can be found
-    on the nf-core website: [link=https://nf-co.re/tools-docs/]https://nf-co.re/tools-docs/[/]
+    on the nf-core website: [link=https://nf-co.re/tools/docs/]https://nf-co.re/tools/docs/[/]

     You can ignore tests using a file called [blue].nf-core.yml[/] [i](if you have a good reason!)[/].
     See the documentation for details.

@@ -553,9 +553,9 @@ def subworkflows(ctx, git_remote, branch, no_pull):

 # nf-core modules list subcommands
-@modules.group()
+@modules.group("list")
 @click.pass_context
-def list(ctx):
+def modules_list(ctx):
     """
     List modules in a local pipeline or remote repository.
     """

# nf-core modules list remote
@@ -563,11 +563,11 @@ def list(ctx):
-@list.command()
+@modules_list.command("remote")
 @click.pass_context
 @click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
 @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
-def remote(ctx, keywords, json):
+def modules_list_remote(ctx, keywords, json):
     """
     List modules in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/].
     """

@@ -588,7 +588,7 @@ def remote(ctx, keywords, json):

 # nf-core modules list local
-@list.command()
+@modules_list.command("local")
 @click.pass_context
 @click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
 @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
@@ -599,7 +599,7 @@ def remote(ctx, keywords, json):
     default=".",
     help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
-def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
+def modules_list_local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
     """
     List modules installed locally in a pipeline
     """

@@ -620,7 +620,7 @@ def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin

 # nf-core modules install
-@modules.command()
+@modules.command("install")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>")
 @click.option(
@@ -633,7 +633,7 @@ def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
 @click.option("-p", "--prompt", is_flag=True, default=False, help="Prompt for the version of the module")
 @click.option("-f", "--force", is_flag=True, default=False, help="Force reinstallation of module if it already exists")
 @click.option("-s", "--sha", type=str, metavar="<commit sha>", help="Install module at commit SHA")
-def install(ctx, tool, dir, prompt, force, sha):
+def modules_install(ctx, tool, dir, prompt, force, sha):
     """
     Install DSL2 modules within a pipeline.

@@ -660,7 +660,7 @@ def install(ctx, tool, dir, prompt, force, sha):

 # nf-core modules update
-@modules.command()
+@modules.command("update")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>")
 @click.option(
@@ -699,7 +699,7 @@ def install(ctx, tool, dir, prompt, force, sha):
     default=False,
     help="Automatically update all linked modules and subworkflows without asking for confirmation",
 )
-def update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps):
+def modules_update(ctx, tool, directory, force, prompt, sha, install_all, preview, save_diff, update_deps):
     """
     Update DSL2 modules within a pipeline.

@@ -767,7 +767,7 @@ def patch(ctx, tool, dir, remove):

 # nf-core modules remove
-@modules.command()
+@modules.command("remove")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>")
 @click.option(
@@ -777,7 +777,7 @@ def patch(ctx, tool, dir, remove):
     default=".",
     help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
-def remove(ctx, dir, tool):
+def modules_remove(ctx, dir, tool):
     """
     Remove a module from a pipeline.
     """

@@ -815,8 +815,20 @@ def remove(ctx, dir, tool):
     default=False,
     help="Create a module from the template without TODOs or examples",
 )
+@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test")
 def create_module(
-    ctx, tool, dir, author, label, meta, no_meta, force, conda_name, conda_package_version, empty_template
+    ctx,
+    tool,
+    dir,
+    author,
+    label,
+    meta,
+    no_meta,
+    force,
+    conda_name,
+    conda_package_version,
+    empty_template,
+    migrate_pytest,
 ):
     """
     Create a new DSL2 module from the nf-core template.
@@ -841,7 +853,7 @@ def create_module(
     # Run function
     try:
         module_create = ModuleCreate(
-            dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template
+            dir, tool, author, label, has_meta, force, conda_name, conda_package_version, empty_template, migrate_pytest
         )
         module_create.create()
     except UserWarning as e:

@@ -887,7 +899,7 @@ def test_module(ctx, tool, dir, no_prompts, update, once):

 # nf-core modules lint
-@modules.command()
+@modules.command("lint")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>")
 @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="<pipeline/modules directory>")
@@ -912,9 +924,7 @@ def test_module(ctx, tool, dir, no_prompts, update, once):
     show_default=True,
 )
 @click.option("--fix-version", is_flag=True, help="Fix the module version if a newer version is available")
-def lint(
-    ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version
-):  # pylint: disable=redefined-outer-name
+def modules_lint(ctx, tool, dir, registry, key, all, fail_warned, local, passed, sort_by, fix_version):
     """
     Lint one or more modules in a directory.

@@ -959,7 +969,7 @@ def lint(

 # nf-core modules info
-@modules.command()
+@modules.command("info")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="<tool> or <tool/subtool>")
 @click.option(
@@ -969,7 +979,7 @@ def lint(
     default=".",
     help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
-def info(ctx, tool, dir):
+def modules_info(ctx, tool, dir):
     """
     Show developer usage information about a given module.

@@ -1035,7 +1045,8 @@ def bump_versions(ctx, tool, dir, all, show_all):
 @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="<nf-core/modules directory>")
 @click.option("-a", "--author", type=str, metavar="<github username>", help="Module author's GitHub username prefixed with '@'")
 @click.option("-f", "--force", is_flag=True, default=False, help="Overwrite any files if they already exist")
-def create_subworkflow(ctx, subworkflow, dir, author, force):
+@click.option("--migrate-pytest", is_flag=True, default=False, help="Migrate a module with pytest tests to nf-test")
+def create_subworkflow(ctx, subworkflow, dir, author, force, migrate_pytest):
     """
     Create a new subworkflow from the nf-core template.

@@ -1049,7 +1060,7 @@ def create_subworkflow(ctx, subworkflow, dir, author, force):

     # Run function
     try:
-        subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force)
+        subworkflow_create = SubworkflowCreate(dir, subworkflow, author, force, migrate_pytest)
         subworkflow_create.create()
     except UserWarning as e:
         log.critical(e)

@@ -1064,7 +1075,6 @@ def create_subworkflow(ctx, subworkflow, dir, author, force):
 @click.pass_context
 @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
 @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="<nf-core/modules directory>")
-@click.option("-t", "--run-tests", is_flag=True, default=False, help="Run the test workflows")
 @click.option("-p", "--no-prompts", is_flag=True, default=False, help="Use defaults without prompting")
 @click.option("-u", "--update", is_flag=True, default=False, help="Update existing snapshots")
 @click.option("-o", "--once", is_flag=True, default=False, help="Run tests only once. Don't check snapshot stability")

@@ -1095,9 +1105,9 @@ def test_subworkflow(ctx, subworkflow, dir, no_prompts, update, once):

 # nf-core subworkflows list subcommands
-@subworkflows.group()
+@subworkflows.group("list")
 @click.pass_context
-def list(ctx):
+def subworkflows_list(ctx):
     """
     List subworkflows in a local pipeline or remote repository.
     """

@@ -1105,11 +1115,11 @@ def list(ctx):

 # nf-core subworkflows list remote
-@list.command()
+@subworkflows_list.command("remote")
 @click.pass_context
 @click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
 @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
-def remote(ctx, keywords, json):
+def subworkflows_list_remote(ctx, keywords, json):
     """
     List subworkflows in a remote GitHub repo [dim i](e.g [link=https://github.com/nf-core/modules]nf-core/modules[/])[/].
     """

@@ -1131,7 +1141,7 @@ def remote(ctx, keywords, json):

 # nf-core subworkflows list local
-@list.command()
+@subworkflows_list.command("local")
 @click.pass_context
 @click.argument("keywords", required=False, nargs=-1, metavar="<filter keywords>")
 @click.option("-j", "--json", is_flag=True, help="Print as JSON to stdout")
@@ -1142,7 +1152,7 @@ def remote(ctx, keywords, json):
     default=".",
     help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
-def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
+def subworkflows_list_local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
     """
     List subworkflows installed locally in a pipeline
     """

@@ -1163,7 +1173,7 @@ def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin

 # nf-core subworkflows lint
-@subworkflows.command()
+@subworkflows.command("lint")
 @click.pass_context
 @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
 @click.option("-d", "--dir", type=click.Path(exists=True), default=".", metavar="<pipeline/modules directory>")
@@ -1187,9 +1197,7 @@ def local(ctx, keywords, json, dir):  # pylint: disable=redefined-builtin
     help="Sort lint output by subworkflow or test name.",
     show_default=True,
 )
-def lint(
-    ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by
-):  # pylint: disable=redefined-outer-name
+def subworkflows_lint(ctx, subworkflow, dir, registry, key, all, fail_warned, local, passed, sort_by):
     """
     Lint one or more subworkflows in a directory.

@@ -1233,7 +1241,7 @@ def lint(

 # nf-core subworkflows info
-@subworkflows.command()
+@subworkflows.command("info")
 @click.pass_context
 @click.argument("tool", type=str, required=False, metavar="subworkflow name")
 @click.option(
@@ -1243,7 +1251,7 @@ def lint(
     default=".",
     help=r"Pipeline directory. [dim]\[default: Current working directory][/]",
 )
-def info(ctx, tool, dir):
+def subworkflows_info(ctx, tool, dir):
     """
     Show developer usage information about a given subworkflow.

@@ -1272,7 +1280,7 @@ def info(ctx, tool, dir):

 # nf-core subworkflows install
-@subworkflows.command()
+@subworkflows.command("install")
 @click.pass_context
 @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
 @click.option(
@@ -1287,7 +1295,7 @@ def info(ctx, tool, dir):
     "-f", "--force", is_flag=True, default=False, help="Force reinstallation of subworkflow if it already exists"
 )
 @click.option("-s", "--sha", type=str, metavar="<commit sha>", help="Install subworkflow at commit SHA")
-def install(ctx, subworkflow, dir, prompt, force, sha):
+def subworkflows_install(ctx, subworkflow, dir, prompt, force, sha):
     """
     Install DSL2 subworkflow within a pipeline.
@@ -1314,7 +1322,7 @@ def install(ctx, subworkflow, dir, prompt, force, sha):

 # nf-core subworkflows remove
-@subworkflows.command()
+@subworkflows.command("remove")
 @click.pass_context
 @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
 @click.option(
@@ -1324,7 +1332,7 @@ def install(ctx, subworkflow, dir, prompt, force, sha):
     default=".",
     help=r"Pipeline directory. [dim]\[default: current working directory][/]",
 )
-def remove(ctx, dir, subworkflow):
+def subworkflows_remove(ctx, dir, subworkflow):
     """
     Remove a subworkflow from a pipeline.
     """

@@ -1344,7 +1352,7 @@ def remove(ctx, dir, subworkflow):

 # nf-core subworkflows update
-@subworkflows.command()
+@subworkflows.command("update")
 @click.pass_context
 @click.argument("subworkflow", type=str, required=False, metavar="subworkflow name")
 @click.option(
@@ -1382,7 +1390,7 @@ def remove(ctx, dir, subworkflow):
     default=False,
     help="Automatically update all linked modules and subworkflows without asking for confirmation",
 )
-def update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps):
+def subworkflows_update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save_diff, update_deps):
     """
     Update DSL2 subworkflow within a pipeline.

@@ -1396,7 +1404,7 @@ def update(ctx, subworkflow, dir, force, prompt, sha, install_all, preview, save
             force,
             prompt,
             sha,
-            all,
+            install_all,
             preview,
             save_diff,
             update_deps,

@@ -1496,11 +1504,11 @@ def build(dir, no_prompts, web_only, url):

 # nf-core schema lint
-@schema.command()
+@schema.command("lint")
 @click.argument(
     "schema_path", type=click.Path(exists=True), default="nextflow_schema.json", metavar="<pipeline schema>"
 )
-def lint(schema_path):
+def schema_lint(schema_path):
     """
     Check that a given pipeline schema is valid.
diff --git a/nf_core/bump_version.py b/nf_core/bump_version.py
index 83979ed4e0..40c8f8984f 100644
--- a/nf_core/bump_version.py
+++ b/nf_core/bump_version.py
@@ -32,7 +32,8 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None:
         new_version = new_version[1:]
     if not current_version:
         raise UserWarning("Could not find config variable 'manifest.version'")
-
+    if current_version == new_version:
+        raise UserWarning(f"Current version is already: {current_version}")
     log.info(f"Changing version number from '{current_version}' to '{new_version}'")

     # nextflow.config - workflow manifest version
@@ -48,17 +49,47 @@ def bump_pipeline_version(pipeline_obj: Pipeline, new_version: str) -> None:
     )

     # multiqc_config.yaml
     multiqc_new_version = "dev" if "dev" in new_version else new_version
+    multiqc_current_version = "dev" if "dev" in current_version else current_version
+    if multiqc_current_version != "dev" and multiqc_new_version != "dev":
+        update_file_version(
+            Path("assets", "multiqc_config.yml"),
+            pipeline_obj,
+            [
+                (
+                    f"/releases/tag/{current_version}",
+                    f"/releases/tag/{new_version}",
+                )
+            ],
+        )
+    if multiqc_current_version != "dev" and multiqc_new_version == "dev":
+        update_file_version(
+            Path("assets", "multiqc_config.yml"),
+            pipeline_obj,
+            [
+                (
+                    f"/releases/tag/{current_version}",
+                    f"/tree/dev",
+                )
+            ],
+        )
+    if multiqc_current_version == "dev" and multiqc_new_version != "dev":
+        update_file_version(
+            Path("assets", "multiqc_config.yml"),
+            pipeline_obj,
+            [
+                (
+                    f"/tree/dev",
+                    f"/releases/tag/{multiqc_new_version}",
+                )
+            ],
+        )
     update_file_version(
         Path("assets", "multiqc_config.yml"),
         pipeline_obj,
         [
             (
-                "/dev",
-                f"/{multiqc_new_version}",
-            ),
-            (
-                rf"{re.escape(current_version)}",
-                f"{multiqc_new_version}",
+                f"/{multiqc_current_version}/",
+                f"/{multiqc_new_version}/",
             ),
         ],
     )
@@ -187,7 +218,7 @@ def update_file_version(filename: Union[str, Path], pipeline_obj: Pipeline, patt
     if found_match:
         content = "\n".join(newcontent) + "\n"
     else:
-        log.error(f"Could not find version number in {filename}: '{pattern}'")
+        log.error(f"Could not find version number in {filename}: `{pattern}`")

     log.info(f"Updated version in '{filename}'")
     for replacement in replacements:
diff --git a/nf_core/components/components_test.py b/nf_core/components/components_test.py
index 689a6819eb..3294c2878b 100644
--- a/nf_core/components/components_test.py
+++ b/nf_core/components/components_test.py
@@ -2,7 +2,6 @@
 The ComponentsTest class handles the generation and testing of nf-test snapshots.
""" -from __future__ import print_function import logging import os @@ -19,7 +18,6 @@ import nf_core.utils from nf_core.components.components_command import ComponentCommand -from tests.utils import set_wd log = logging.getLogger(__name__) @@ -92,7 +90,7 @@ def run(self) -> None: os.environ[ "NFT_DIFF_ARGS" ] = "--line-numbers --expand-tabs=2" # taken from https://code.askimed.com/nf-test/docs/assertions/snapshots/#snapshot-differences - with set_wd(Path(self.dir)): + with nf_core.utils.set_wd(Path(self.dir)): self.check_snapshot_stability() if len(self.errors) > 0: errors = "\n - ".join(self.errors) @@ -191,10 +189,11 @@ def generate_snapshot(self) -> bool: verbose = "--verbose --debug" if self.verbose else "" update = "--update-snapshot" if self.update else "" self.update = False # reset self.update to False to test if the new snapshot is stable + tag = f"subworkflows/{self.component_name}" if self.component_type == "subworkflows" else self.component_name result = nf_core.utils.run_cmd( "nf-test", - f"test --tag {self.component_name} --profile {os.environ['PROFILE']} {verbose} {update}", + f"test --tag {tag} --profile {os.environ['PROFILE']} {verbose} {update}", ) if result is not None: nftest_out, nftest_err = result diff --git a/nf_core/components/create.py b/nf_core/components/create.py index 54fade29a2..568ca22af5 100644 --- a/nf_core/components/create.py +++ b/nf_core/components/create.py @@ -7,8 +7,8 @@ import glob import json import logging -import os import re +import shutil import subprocess from pathlib import Path from typing import Dict, Optional @@ -16,11 +16,13 @@ import jinja2 import questionary import rich +import yaml from packaging.version import parse as parse_version import nf_core import nf_core.utils from nf_core.components.components_command import ComponentCommand +from nf_core.lint_utils import run_prettier_on_file log = logging.getLogger(__name__) @@ -38,6 +40,7 @@ def __init__( conda_name: Optional[str] = None, conda_version: Optional[str] = None, empty_template: bool = False, + migrate_pytest: bool = False, ): super().__init__(component_type, directory) self.directory = directory @@ -56,8 +59,9 @@ def __init__( self.bioconda = None self.singularity_container = None self.docker_container = None - self.file_paths: Dict[str, str] = {} + self.file_paths: Dict[str, Path] = {} self.not_empty_template = not empty_template + self.migrate_pytest = migrate_pytest def create(self): """ @@ -125,32 +129,47 @@ def create(self): # Determine the component name self.component_name = self.component - self.component_dir = self.component + self.component_dir = Path(self.component) if self.subtool: self.component_name = f"{self.component}/{self.subtool}" - self.component_dir = os.path.join(self.component, self.subtool) + self.component_dir = Path(self.component, self.subtool) self.component_name_underscore = self.component_name.replace("/", "_") # Check existence of directories early for fast-fail self.file_paths = self._get_component_dirs() - if self.component_type == "modules": - # Try to find a bioconda package for 'component' - self._get_bioconda_tool() + if self.migrate_pytest: + # Rename the component directory to old + component_old_dir = Path(str(self.component_dir) + "_old") + component_parent_path = Path(self.directory, self.component_type, self.org) + component_old_path = component_parent_path / component_old_dir + component_path = component_parent_path / self.component_dir - # Prompt for GitHub username - self._get_username() + component_path.rename(component_old_path) 
+ else: + if self.component_type == "modules": + # Try to find a bioconda package for 'component' + self._get_bioconda_tool() - if self.component_type == "modules": - self._get_module_structure_components() + # Prompt for GitHub username + self._get_username() + + if self.component_type == "modules": + self._get_module_structure_components() # Create component template with jinja2 self._render_template() log.info(f"Created component template: '{self.component_name}'") - new_files = list(self.file_paths.values()) + if self.migrate_pytest: + self._copy_old_files(component_old_path) + log.info("Migrate pytest tests: Copied original module files to new module") + shutil.rmtree(component_old_path) + self._print_and_delete_pytest_files() + + new_files = [str(path) for path in self.file_paths.values()] log.info("Created following files:\n " + "\n ".join(new_files)) def _get_bioconda_tool(self): @@ -259,16 +278,16 @@ def _render_template(self): # Write output to the target file log.debug(f"Writing output to: '{dest_fn}'") - os.makedirs(os.path.dirname(dest_fn), exist_ok=True) + dest_fn.parent.mkdir(exist_ok=True, parents=True) with open(dest_fn, "w") as fh: log.debug(f"Writing output to: '{dest_fn}'") fh.write(rendered_output) # Mirror file permissions - template_stat = os.stat( - os.path.join(os.path.dirname(nf_core.__file__), f"{self.component_type[:-1]}-template", template_fn) - ) - os.chmod(dest_fn, template_stat.st_mode) + template_stat = ( + Path(nf_core.__file__).parent / f"{self.component_type[:-1]}-template" / template_fn + ).stat() + dest_fn.chmod(template_stat.st_mode) def _collect_name_prompt(self): """ @@ -319,17 +338,17 @@ def _get_component_dirs(self): """ file_paths = {} if self.repo_type == "pipeline": - local_component_dir = os.path.join(self.directory, self.component_type, "local") + local_component_dir = Path(self.directory, self.component_type, "local") # Check whether component file already exists - component_file = os.path.join(local_component_dir, f"{self.component_name}.nf") - if os.path.exists(component_file) and not self.force_overwrite: + component_file = local_component_dir / f"{self.component_name}.nf" + if component_file.exists() and not self.force_overwrite: raise UserWarning( f"{self.component_type[:-1].title()} file exists already: '{component_file}'. Use '--force' to overwrite" ) if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - if self.subtool and os.path.exists(os.path.join(local_component_dir, f"{self.component}.nf")): + if self.subtool and (local_component_dir / f"{self.component}.nf").exists(): raise UserWarning( f"Module '{self.component}' exists already, cannot make subtool '{self.component_name}'" ) @@ -342,50 +361,42 @@ def _get_component_dirs(self): ) # Set file paths - file_paths[os.path.join(self.component_type, "main.nf")] = component_file + file_paths["main.nf"] = component_file if self.repo_type == "modules": - component_dir = os.path.join(self.directory, self.component_type, self.org, self.component_dir) + component_dir = Path(self.directory, self.component_type, self.org, self.component_dir) # Check if module/subworkflow directories exist already - if os.path.exists(component_dir) and not self.force_overwrite: + if component_dir.exists() and not self.force_overwrite and not self.migrate_pytest: raise UserWarning( f"{self.component_type[:-1]} directory exists: '{component_dir}'. 
Use '--force' to overwrite" ) if self.component_type == "modules": # If a subtool, check if there is a module called the base tool name already - parent_tool_main_nf = os.path.join( - self.directory, self.component_type, self.org, self.component, "main.nf" - ) - if self.subtool and os.path.exists(parent_tool_main_nf): + parent_tool_main_nf = Path(self.directory, self.component_type, self.org, self.component, "main.nf") + if self.subtool and parent_tool_main_nf.exists() and not self.migrate_pytest: raise UserWarning( f"Module '{parent_tool_main_nf}' exists already, cannot make subtool '{self.component_name}'" ) # If no subtool, check that there isn't already a tool/subtool tool_glob = glob.glob( - f"{os.path.join(self.directory, self.component_type, self.org, self.component)}/*/main.nf" + f"{Path(self.directory, self.component_type, self.org, self.component)}/*/main.nf" ) - if not self.subtool and tool_glob: + if not self.subtool and tool_glob and not self.migrate_pytest: raise UserWarning( f"Module subtool '{tool_glob[0]}' exists already, cannot make tool '{self.component_name}'" ) # Set file paths # For modules - can be tool/ or tool/subtool/ so can't do in template directory structure - file_paths[os.path.join(self.component_type, "main.nf")] = os.path.join(component_dir, "main.nf") - file_paths[os.path.join(self.component_type, "meta.yml")] = os.path.join(component_dir, "meta.yml") + file_paths["main.nf"] = component_dir / "main.nf" + file_paths["meta.yml"] = component_dir / "meta.yml" if self.component_type == "modules": - file_paths[os.path.join(self.component_type, "environment.yml")] = os.path.join( - component_dir, "environment.yml" - ) - file_paths[os.path.join(self.component_type, "tests", "tags.yml")] = os.path.join( - component_dir, "tests", "tags.yml" - ) - file_paths[os.path.join(self.component_type, "tests", "main.nf.test")] = os.path.join( - component_dir, "tests", "main.nf.test" - ) + file_paths["environment.yml"] = component_dir / "environment.yml" + file_paths["tests/tags.yml"] = component_dir / "tests" / "tags.yml" + file_paths["tests/main.nf.test"] = component_dir / "tests" / "main.nf.test" return file_paths @@ -396,8 +407,7 @@ def _get_username(self): # Try to guess the current user if `gh` is installed author_default = None try: - with open(os.devnull, "w") as devnull: - gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=devnull)) + gh_auth_user = json.loads(subprocess.check_output(["gh", "api", "/user"], stderr=subprocess.DEVNULL)) author_default = f"@{gh_auth_user['login']}" except Exception as e: log.debug(f"Could not find GitHub username using 'gh' cli command: [red]{e}") @@ -411,3 +421,65 @@ def _get_username(self): f"[violet]GitHub Username:[/]{' (@author)' if author_default is None else ''}", default=author_default, ) + + def _copy_old_files(self, component_old_path): + """Copy files from old module to new module""" + log.debug("Copying original main.nf file") + shutil.copyfile(component_old_path / "main.nf", self.file_paths["main.nf"]) + log.debug("Copying original meta.yml file") + shutil.copyfile(component_old_path / "meta.yml", self.file_paths["meta.yml"]) + if self.component_type == "modules": + log.debug("Copying original environment.yml file") + shutil.copyfile(component_old_path / "environment.yml", self.file_paths["environment.yml"]) + if (component_old_path / "templates").is_dir(): + log.debug("Copying original templates directory") + shutil.copytree( + component_old_path / "templates", 
self.file_paths["environment.yml"].parent / "templates" + ) + # Create a nextflow.config file if it contains information other than publishDir + pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) + nextflow_config = pytest_dir / "nextflow.config" + if nextflow_config.is_file(): + with open(nextflow_config, "r") as fh: + config_lines = "" + for line in fh: + if "publishDir" not in line: + config_lines += line + if len(config_lines) > 0: + log.debug("Copying nextflow.config file from pytest tests") + with open( + Path(self.directory, self.component_type, self.org, self.component_dir, "tests", "nextflow.config"), + "w+", + ) as ofh: + ofh.write(config_lines) + + def _print_and_delete_pytest_files(self): + """Prompt if pytest files should be deleted and printed to stdout""" + pytest_dir = Path(self.directory, "tests", self.component_type, self.org, self.component_dir) + if rich.prompt.Confirm.ask( + "[violet]Do you want to delete the pytest files?[/]\nPytest file 'main.nf' will be printed to standard output to allow migrating the tests manually to 'main.nf.test'.", + default=False, + ): + with open(pytest_dir / "main.nf", "r") as fh: + log.info(fh.read()) + shutil.rmtree(pytest_dir) + log.info( + "[yellow]Please convert the pytest tests to nf-test in 'main.nf.test'.[/]\n" + "You can find more information about nf-test [link=https://nf-co.re/docs/contributing/modules#migrating-from-pytest-to-nf-test]at the nf-core web[/link]. " + ) + else: + log.info( + "[yellow]Please migrate the pytest tests to nf-test in 'main.nf.test'.[/]\n" + "You can find more information about nf-test [link=https://nf-co.re/docs/contributing/modules#migrating-from-pytest-to-nf-test]at the nf-core web[/link].\n" + f"Once done, make sure to delete the module pytest files to avoid linting errors: {pytest_dir}" + ) + # Delete tags from pytest_modules.yml + modules_yml = Path(self.directory, "tests", "config", "pytest_modules.yml") + with open(modules_yml, "r") as fh: + yml_file = yaml.safe_load(fh) + yml_key = str(self.component_dir) if self.component_type == "modules" else f"subworkflows/{self.component_dir}" + if yml_key in yml_file: + del yml_file[yml_key] + with open(modules_yml, "w") as fh: + yaml.dump(yml_file, fh) + run_prettier_on_file(modules_yml) diff --git a/nf_core/components/install.py b/nf_core/components/install.py index f7a5fe6680..6385ee4092 100644 --- a/nf_core/components/install.py +++ b/nf_core/components/install.py @@ -127,6 +127,8 @@ def install(self, component, silent=False): self.install_included_components(component_dir) if not silent: + modules_json.load() + modules_json.dump(run_prettier=True) # Print include statement component_name = "_".join(component.upper().split("/")) log.info(f"Use the following statement to include this {self.component_type[:-1]}:") diff --git a/nf_core/components/lint/__init__.py b/nf_core/components/lint/__init__.py index c46d68ff22..efffc28e85 100644 --- a/nf_core/components/lint/__init__.py +++ b/nf_core/components/lint/__init__.py @@ -10,7 +10,9 @@ import os from pathlib import Path -import rich +import rich.box +import rich.console +import rich.panel from rich.markdown import Markdown from rich.table import Table @@ -209,7 +211,7 @@ def _print_results(self, show_passed=False, sort_by="test"): self.failed.sort(key=operator.attrgetter(*sort_order)) # Find maximum module name length - max_name_len = 40 + max_name_len = len(self.component_type[:-1] + " name") for tests in [self.passed, self.warned, self.failed]: try: for 
lint_result in tests: @@ -264,7 +266,7 @@ def format_result(test_results, table): table = Table(style="yellow", box=rich.box.MINIMAL, pad_edge=False, border_style="dim") table.add_column(f"{self.component_type[:-1].title()} name", width=max_name_len) table.add_column("File path") - table.add_column("Test message") + table.add_column("Test message", overflow="fold") table = format_result(self.warned, table) console.print( rich.panel.Panel( @@ -278,10 +280,15 @@ def format_result(test_results, table): # Table of failing tests if len(self.failed) > 0: - table = Table(style="red", box=rich.box.MINIMAL, pad_edge=False, border_style="dim") + table = Table( + style="red", + box=rich.box.MINIMAL, + pad_edge=False, + border_style="dim", + ) table.add_column(f"{self.component_type[:-1].title()} name", width=max_name_len) table.add_column("File path") - table.add_column("Test message") + table.add_column("Test message", overflow="fold") table = format_result(self.failed, table) console.print( rich.panel.Panel( diff --git a/nf_core/components/list.py b/nf_core/components/list.py index d05c6d84a5..47c0eaad62 100644 --- a/nf_core/components/list.py +++ b/nf_core/components/list.py @@ -2,7 +2,7 @@ import logging from typing import Dict, List, Optional, Tuple, Union, cast -import rich +import rich.table from nf_core.components.components_command import ComponentCommand from nf_core.modules.modules_json import ModulesJson @@ -24,7 +24,7 @@ def __init__( super().__init__(component_type, pipeline_dir, remote_url, branch, no_pull) self.remote = remote - def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> rich.table.Table: + def list_components(self, keywords: Optional[List[str]] = None, print_json=False) -> Union[rich.table.Table, str]: keywords = keywords or [] """ Get available modules/subworkflows names from GitHub tree for repo @@ -38,7 +38,7 @@ def list_components(self, keywords: Optional[List[str]] = None, print_json=False table.add_column(f"{self.component_type[:-1].capitalize()} Name") components: List[str] = [] - def pattern_msg(keywords: List[str]): + def pattern_msg(keywords: List[str]) -> str: if len(keywords) == 0: return "" if len(keywords) == 1: @@ -107,37 +107,40 @@ def pattern_msg(keywords: List[str]): table.add_column("Date") # Load 'modules.json' - modules_json = modules_json.modules_json + modules_json_file = modules_json.modules_json for repo_url, component_with_dir in sorted(repos_with_comps.items()): repo_entry: Dict[str, Dict[str, Dict[str, Dict[str, Union[str, List[str]]]]]] - - repo_entry = modules_json["repos"].get(repo_url, {}) - for install_dir, component in sorted(component_with_dir): - # Use cast() to predict the return type of recursive get():s - repo_modules = cast(dict, repo_entry.get(self.component_type)) - component_entry = cast(dict, cast(dict, repo_modules.get(install_dir)).get(component)) - - if component_entry: - version_sha = component_entry["git_sha"] - try: - # pass repo_name to get info on modules even outside nf-core/modules - message, date = ModulesRepo( - remote_url=repo_url, - branch=component_entry["branch"], - ).get_commit_info(version_sha) - except LookupError as e: - log.warning(e) + if modules_json_file is None: + log.warning(f"Modules JSON file '{modules_json.modules_json_path}' is missing. 
") + continue + else: + repo_entry = modules_json_file["repos"].get(repo_url, {}) + for install_dir, component in sorted(component_with_dir): + # Use cast() to predict the return type of recursive get():s + repo_modules = cast(dict, repo_entry.get(self.component_type)) + component_entry = cast(dict, cast(dict, repo_modules.get(install_dir)).get(component)) + + if component_entry: + version_sha = component_entry["git_sha"] + try: + # pass repo_name to get info on modules even outside nf-core/modules + message, date = ModulesRepo( + remote_url=repo_url, + branch=component_entry["branch"], + ).get_commit_info(version_sha) + except LookupError as e: + log.warning(e) + date = "[red]Not Available" + message = "[red]Not Available" + else: + log.warning( + f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'" + ) + version_sha = "[red]Not Available" date = "[red]Not Available" message = "[red]Not Available" - else: - log.warning( - f"Commit SHA for {self.component_type[:-1]} '{install_dir}/{self.component_type}' is missing from 'modules.json'" - ) - version_sha = "[red]Not Available" - date = "[red]Not Available" - message = "[red]Not Available" - table.add_row(component, repo_url, version_sha, message, date) + table.add_row(component, repo_url, version_sha, message, date) if print_json: return json.dumps(components, sort_keys=True, indent=4) diff --git a/nf_core/components/nfcore_component.py b/nf_core/components/nfcore_component.py index 94e7584a9b..874fa570bc 100644 --- a/nf_core/components/nfcore_component.py +++ b/nf_core/components/nfcore_component.py @@ -1,7 +1,12 @@ """ The NFCoreComponent class holds information and utility functions for a single module or subworkflow """ +import logging +import re from pathlib import Path +from typing import Union + +log = logging.getLogger(__name__) class NFCoreComponent: @@ -44,16 +49,16 @@ def __init__( if remote_component: # Initialize the important files - self.main_nf = self.component_dir / "main.nf" - self.meta_yml = self.component_dir / "meta.yml" + self.main_nf = Path(self.component_dir, "main.nf") + self.meta_yml = Path(self.component_dir, "meta.yml") self.process_name = "" - self.environment_yml = self.component_dir / "environment.yml" + self.environment_yml = Path(self.component_dir, "environment.yml") repo_dir = self.component_dir.parts[: self.component_dir.parts.index(self.component_name.split("/")[0])][-1] self.org = repo_dir - self.nftest_testdir = self.component_dir / "tests" - self.nftest_main_nf = self.nftest_testdir / "main.nf.test" - self.tags_yml = self.nftest_testdir / "tags.yml" + self.nftest_testdir = Path(self.component_dir, "tests") + self.nftest_main_nf = Path(self.nftest_testdir, "main.nf.test") + self.tags_yml = Path(self.nftest_testdir, "tags.yml") if self.repo_type == "pipeline": patch_fn = f"{self.component_name.replace('/', '-')}.diff" @@ -73,7 +78,7 @@ def __init__( self.test_yml = None self.test_main_nf = None - def _get_main_nf_tags(self, test_main_nf: str): + def _get_main_nf_tags(self, test_main_nf: Union[Path, str]): """Collect all tags from the main.nf.test file.""" tags = [] with open(test_main_nf, "r") as fh: @@ -82,11 +87,107 @@ def _get_main_nf_tags(self, test_main_nf: str): tags.append(line.strip().split()[1].strip('"')) return tags - def _get_included_components(self, main_nf: str): + def _get_included_components(self, main_nf: Union[Path, str]): """Collect all included components from the main.nf file.""" included_components = [] with open(main_nf, 
"r") as fh: for line in fh: if line.strip().startswith("include"): - included_components.append(line.strip().split()[-1].split(self.org)[-1].split("main")[0].strip("/")) + # get tool/subtool or subworkflow name from include statement, can be in the form + #'../../../modules/nf-core/hisat2/align/main' + #'../bam_sort_stats_samtools/main' + #'../subworkflows/nf-core/bam_sort_stats_samtools/main' + #'plugin/nf-validation' + component = line.strip().split()[-1].split(self.org)[-1].split("main")[0].strip("/") + component = component.replace("'../", "subworkflows/") + component = component.replace("'", "") + included_components.append(component) + return included_components + + def _get_included_components_in_chained_tests(self, main_nf_test: Union[Path, str]): + """Collect all included components from the main.nf file.""" + included_components = [] + with open(main_nf_test, "r") as fh: + for line in fh: + if line.strip().startswith("script"): + # get tool/subtool or subworkflow name from script statement, can be: + # if the component is a module TOOL/SUBTOOL: + # '../../SUBTOOL/main.nf' + # '../../../TOOL/SUBTOOL/main.nf' + # '../../../TOOL/main.nf' + # if the component is a module TOOL: + # '../../TOOL/main.nf' + # '../../TOOL/SUBTOOL/main.nf' + # if the component uses full paths or is a subworkflow: + # '(../../)modules/nf-core/TOOL/(SUBTOOL/)main.nf' + # '(../../)subworkflows/nf-core/TOOL/(SUBTOOL/)main.nf' + # the line which uses the current component script: + # '../main.nf' + component = ( + line.strip() + .split("../")[-1] + .split(self.org)[-1] + .split("main.nf")[0] + .strip("'") + .strip('"') + .strip("/") + ) + if ( + "/" in self.component_name + and "/" not in component + and line.count("../") == 2 + and self.org not in line + and component != "" + ): + # Add the current component name "TOOL" to the tag + component = f"{self.component_name.split('/')[0]}/{component}" + if "subworkflows" in line: + # Add the subworkflows prefix to the tag + component = f"subworkflows/{component}" + if component != "": + included_components.append(component) return included_components + + def get_inputs_from_main_nf(self): + """Collect all inputs from the main.nf file.""" + inputs = [] + with open(self.main_nf, "r") as f: + data = f.read() + # get input values from main.nf after "input:", which can be formatted as tuple val(foo) path(bar) or val foo or val bar or path bar or path foo + # regex matches: + # val(foo) + # path(bar) + # val foo + # val bar + # path bar + # path foo + # don't match anything inside comments or after "output:" + if "input:" not in data: + log.info(f"Could not find any inputs in {self.main_nf}") + return inputs + input_data = data.split("input:")[1].split("output:")[0] + regex = r"(val|path)\s*(\(([^)]+)\)|\s*([^)\s,]+))" + matches = re.finditer(regex, input_data, re.MULTILINE) + for matchNum, match in enumerate(matches, start=1): + if match.group(3): + inputs.append(match.group(3)) + elif match.group(4): + inputs.append(match.group(4)) + log.info(f"Found {len(inputs)} inputs in {self.main_nf}") + self.inputs = inputs + + def get_outputs_from_main_nf(self): + outputs = [] + with open(self.main_nf, "r") as f: + data = f.read() + # get output values from main.nf after "output:". 
the names are always after "emit:"
+        if "output:" not in data:
+            log.info(f"Could not find any outputs in {self.main_nf}")
+            return outputs
+        output_data = data.split("output:")[1].split("when:")[0]
+        regex = r"emit:\s*([^)\s,]+)"
+        matches = re.finditer(regex, output_data, re.MULTILINE)
+        for matchNum, match in enumerate(matches, start=1):
+            outputs.append(match.group(1))
+        log.info(f"Found {len(outputs)} outputs in {self.main_nf}")
+        self.outputs = outputs
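The two parsing helpers added above lean entirely on small regexes. As a quick illustration (a sketch, not the shipped implementation), here is the `emit:` pattern from `get_outputs_from_main_nf` run against an invented output block:

```python
import re

# Invented example of a module's output block, for illustration only.
main_nf = """
    output:
    tuple val(meta), path("*.bam"), emit: bam
    path "versions.yml"           , emit: versions

    when:
    task.ext.when == null || task.ext.when
"""

# Same pattern as in get_outputs_from_main_nf() above:
# capture the identifier that follows 'emit:'.
regex = r"emit:\s*([^)\s,]+)"
output_data = main_nf.split("output:")[1].split("when:")[0]
outputs = [m.group(1) for m in re.finditer(regex, output_data, re.MULTILINE)]
print(outputs)  # ['bam', 'versions']
```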
diff --git a/nf_core/components/update.py b/nf_core/components/update.py
index 6fc6e03544..077cb2b840 100644
--- a/nf_core/components/update.py
+++ b/nf_core/components/update.py
@@ -288,20 +288,21 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr
                 updated.append(component)
             recursive_update = True
             modules_to_update, subworkflows_to_update = self.get_components_to_update(component)
-            if not silent and not self.update_all and len(modules_to_update + subworkflows_to_update) > 0:
-                log.warning(
-                    f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n"
-                    "It is advised to keep all your modules and subworkflows up to date.\n"
-                    "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n"
-                )
-                if self.update_deps:
-                    recursive_update = True
-                else:
-                    recursive_update = questionary.confirm(
-                        "Would you like to continue updating all modules and subworkflows?",
-                        default=True,
-                        style=nf_core.utils.nfcore_question_style,
-                    ).unsafe_ask()
+            if not silent and len(modules_to_update + subworkflows_to_update) > 0:
+                if not self.update_all:
+                    log.warning(
+                        f"All modules and subworkflows linked to the updated {self.component_type[:-1]} will be {'asked for update' if self.show_diff else 'automatically updated'}.\n"
+                        "It is advised to keep all your modules and subworkflows up to date.\n"
+                        "It is not guaranteed that a subworkflow will continue working as expected if all modules/subworkflows used in it are not up to date.\n"
+                    )
+                    if self.update_deps:
+                        recursive_update = True
+                    else:
+                        recursive_update = questionary.confirm(
+                            "Would you like to continue updating all modules and subworkflows?",
+                            default=True,
+                            style=nf_core.utils.nfcore_question_style,
+                        ).unsafe_ask()
             if recursive_update and len(modules_to_update + subworkflows_to_update) > 0:
                 # Update linked components
                 self.update_linked_components(modules_to_update, subworkflows_to_update, updated)
@@ -323,8 +324,12 @@ def update(self, component=None, silent=False, updated=None, check_diff_exist=Tr
                 )
         elif not all_patches_successful and not silent:
             log.info(f"Updates complete. Please apply failed patch{plural_es(components_info)} manually.")
+            self.modules_json.load()
+            self.modules_json.dump(run_prettier=True)
         elif not silent:
             log.info("Updates complete :sparkles:")
+            self.modules_json.load()
+            self.modules_json.dump(run_prettier=True)

         return exit_value
diff --git a/nf_core/create.py b/nf_core/create.py
index 470623f551..56d0912a07 100644
--- a/nf_core/create.py
+++ b/nf_core/create.py
@@ -11,7 +11,7 @@ import time
 from pathlib import Path

-import filetype
+import filetype  # type: ignore
 import git
 import jinja2
 import questionary
diff --git a/nf_core/download.py b/nf_core/download.py
index 9ca786b5e3..08bef935ba 100644
--- a/nf_core/download.py
+++ b/nf_core/download.py
@@ -493,7 +493,7 @@ def prompt_singularity_cachedir_utilization(self):
         ):
             stderr.print(
                 "\nIf you are working on the same system where you will run Nextflow, you can amend the downloaded images to the ones in the"
-                "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them."
+                "[blue not bold]$NXF_SINGULARITY_CACHEDIR[/] folder, Nextflow will automatically find them. "
                 "However if you will transfer the downloaded files to a different system then they should be copied to the target folder."
             )
         self.container_cache_utilisation = questionary.select(
@@ -1081,7 +1081,7 @@ def get_singularity_images(self, current_revision=""):
                         continue
                     except ContainerError.ImageNotFound as e:
                         # Try other registries
-                        if e.error_log.absoluteURI:
+                        if e.error_log.absolute_URI:
                             break  # there is no point in trying other registries if an absolute URI was specified.
                         else:
                             continue
@@ -1092,7 +1092,7 @@
                         # Try other registries
                         log.error(e.message)
                         log.error(e.helpmessage)
-                        if e.error_log.absoluteURI:
+                        if e.error_log.absolute_URI:
                             break  # there is no point in trying other registries if an absolute URI was specified.
                         else:
                             continue
@@ -1247,7 +1247,7 @@ def singularity_pull_image(self, container, out_path, cache_path, library, progr
         # Thus, if an explicit registry is specified, the provided -l value is ignored.
         container_parts = container.split("/")
         if len(container_parts) > 2:
-            address = container
+            address = f"docker://{container}"
             absolute_URI = True
         else:
             address = f"docker://{library}/{container.replace('docker://', '')}"
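The last download.py hunk changes how the Singularity pull address is formed, and is easy to misread. A minimal standalone sketch of the rule it fixes; the default `library` value here is only an assumption for illustration:

```python
# Sketch of the registry logic the download.py hunk above corrects.
def build_address(container: str, library: str = "quay.io") -> str:
    container_parts = container.split("/")
    if len(container_parts) > 2:
        # More than two path components means an explicit registry was given,
        # so keep it -- but still hand Singularity a docker:// URI
        # (previously the bare container name was returned here).
        return f"docker://{container}"
    # Otherwise fall back to the configured default library/registry.
    return f"docker://{library}/{container.replace('docker://', '')}"

print(build_address("docker.io/biocontainers/fastqc:0.12.1"))
# docker://docker.io/biocontainers/fastqc:0.12.1
print(build_address("biocontainers/fastqc:0.12.1"))
# docker://quay.io/biocontainers/fastqc:0.12.1
```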
diff --git a/nf_core/gitpod/gitpod.Dockerfile b/nf_core/gitpod/gitpod.Dockerfile
index 47b27bb514..e721f210d0 100644
--- a/nf_core/gitpod/gitpod.Dockerfile
+++ b/nf_core/gitpod/gitpod.Dockerfile
@@ -1,3 +1,6 @@
+# Test build locally before making a PR
+# docker build -t gitpod:test -f nf_core/gitpod/gitpod.Dockerfile .
+
 FROM gitpod/workspace-base

 USER root
@@ -43,16 +46,17 @@ RUN conda config --add channels defaults && \
     conda config --add channels bioconda && \
     conda config --add channels conda-forge && \
     conda config --set channel_priority strict && \
-    conda install --quiet --yes --name base mamba && \
-    mamba install --quiet --yes --name base \
+    conda install --quiet --yes --name base \
+        mamba \
         nextflow \
         nf-core \
         nf-test \
         black \
         prettier \
         pre-commit \
+        openjdk \
         pytest-workflow && \
-    mamba clean --all -f -y
+    conda clean --all --force-pkgs-dirs --yes

 # Update Nextflow
 RUN nextflow self-update
diff --git a/nf_core/lint/__init__.py b/nf_core/lint/__init__.py
index 70f7ea925f..797ebbcc91 100644
--- a/nf_core/lint/__init__.py
+++ b/nf_core/lint/__init__.py
@@ -164,26 +164,30 @@ class PipelineLint(nf_core.utils.Pipeline):
         warned (list): A list of tuples of the form: ``(, )``
     """

-    from .actions_awsfulltest import actions_awsfulltest
-    from .actions_awstest import actions_awstest
-    from .actions_ci import actions_ci
-    from .actions_schema_validation import actions_schema_validation
-    from .files_exist import files_exist
-    from .files_unchanged import files_unchanged
-    from .merge_markers import merge_markers
-    from .modules_json import modules_json
-    from .modules_structure import modules_structure
-    from .multiqc_config import multiqc_config
-    from .nextflow_config import nextflow_config
-    from .pipeline_name_conventions import pipeline_name_conventions
-    from .pipeline_todos import pipeline_todos
-    from .readme import readme
-    from .schema_description import schema_description
-    from .schema_lint import schema_lint
-    from .schema_params import schema_params
-    from .system_exit import system_exit
-    from .template_strings import template_strings
-    from .version_consistency import version_consistency
+    from .actions_awsfulltest import actions_awsfulltest  # type: ignore[misc]
+    from .actions_awstest import actions_awstest  # type: ignore[misc]
+    from .actions_ci import actions_ci  # type: ignore[misc]
+    from .actions_schema_validation import (  # type: ignore[misc]
+        actions_schema_validation,
+    )
+    from .files_exist import files_exist  # type: ignore[misc]
+    from .files_unchanged import files_unchanged  # type: ignore[misc]
+    from .merge_markers import merge_markers  # type: ignore[misc]
+    from .modules_json import modules_json  # type: ignore[misc]
+    from .modules_structure import modules_structure  # type: ignore[misc]
+    from .multiqc_config import multiqc_config  # type: ignore[misc]
+    from .nextflow_config import nextflow_config  # type: ignore[misc]
+    from .pipeline_name_conventions import (  # type: ignore[misc]
+        pipeline_name_conventions,
+    )
+    from .pipeline_todos import pipeline_todos  # type: ignore[misc]
+    from .readme import readme  # type: ignore[misc]
+    from .schema_description import schema_description  # type: ignore[misc]
+    from .schema_lint import schema_lint  # type: ignore[misc]
+    from .schema_params import schema_params  # type: ignore[misc]
+    from .system_exit import system_exit  # type: ignore[misc]
+    from .template_strings import template_strings  # type: ignore[misc]
+    from .version_consistency import version_consistency  # type: ignore[misc]

     def __init__(
         self, wf_path, release_mode=False, fix=(), key=None, fail_ignored=False, fail_warned=False, hide_progress=False
@@ -367,10 +371,10 @@ def format_result(test_results):
         Given a list of error message IDs and the message texts, return a nicely
         formatted string for the terminal with appropriate ASCII colours.
""" + tools_version = __version__ + if "dev" in __version__: + tools_version = "latest" for eid, msg in test_results: - tools_version = __version__ - if "dev" in __version__: - tools_version = "latest" yield Markdown( f"[{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html): {msg}" ) @@ -462,6 +466,10 @@ def _get_results_md(self): if len(self.failed) > 0: overall_result = "Failed :x:" + tools_version = __version__ + if "dev" in __version__: + tools_version = "latest" + # List of tests for details test_failure_count = "" test_failures = "" @@ -470,7 +478,7 @@ def _get_results_md(self): test_failures = "### :x: Test failures:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.failed ] @@ -484,7 +492,7 @@ def _get_results_md(self): test_ignored = "### :grey_question: Tests ignored:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.ignored ] @@ -498,7 +506,7 @@ def _get_results_md(self): test_fixed = "### :grey_question: Tests fixed:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.fixed ] @@ -512,7 +520,7 @@ def _get_results_md(self): test_warnings = "### :heavy_exclamation_mark: Test warnings:\n\n{}\n\n".format( "\n".join( [ - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html) - " + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html) - " f"{strip_ansi_codes(msg, '`')}" for eid, msg in self.warned ] @@ -527,7 +535,7 @@ def _get_results_md(self): "\n".join( [ ( - f"* [{eid}](https://nf-co.re/tools-docs/lint_tests/{eid}.html)" + f"* [{eid}](https://nf-co.re/tools/docs/{tools_version}/pipeline_lint_tests/{eid}.html)" f" - {strip_ansi_codes(msg, '`')}" ) for eid, msg in self.passed diff --git a/nf_core/lint/multiqc_config.py b/nf_core/lint/multiqc_config.py index a1b831ae2d..cbbeae07a8 100644 --- a/nf_core/lint/multiqc_config.py +++ b/nf_core/lint/multiqc_config.py @@ -1,9 +1,10 @@ import os +from typing import Dict, List import yaml -def multiqc_config(self): +def multiqc_config(self) -> Dict[str, List[str]]: """Make sure basic multiQC plugins are installed and plots are exported Basic template: @@ -20,8 +21,8 @@ def multiqc_config(self): export_plots: true """ - passed = [] - failed = [] + passed: List[str] = [] + failed: List[str] = [] # Remove field that should be ignored according to the linting config ignore_configs = self.lint_config.get("multiqc_config", []) @@ -38,10 +39,16 @@ def multiqc_config(self): except Exception as e: return {"failed": [f"Could not parse yaml file: {fn}, {e}"]} - # Check that the report_comment exists and matches + # check if requried sections are present + required_sections = ["report_section_order", "export_plots", "report_comment"] + for section in required_sections: + if section not in mqc_yml and section not in ignore_configs: + failed.append(f"'assets/multiqc_config.yml' does not contain `{section}`") + return {"passed": passed, "failed": failed} + else: + passed.append(f"'assets/multiqc_config.yml' 
contains `{section}`") + try: - if "report_section_order" not in mqc_yml: - raise AssertionError() orders = {} summary_plugin_name = f"{self.pipeline_prefix}-{self.pipeline_name}-summary" min_plugins = ["software_versions", summary_plugin_name] @@ -70,27 +77,37 @@ def multiqc_config(self): if "report_comment" not in ignore_configs: # Check that the minimum plugins exist and are coming first in the summary - try: - version = self.nf_config.get("manifest.version", "").strip(" '\"") - version = "dev" if "dev" in version else version - if "report_comment" not in mqc_yml: - raise AssertionError() - if mqc_yml["report_comment"].strip() != ( - f'This report has been generated by the nf-core/{self.pipeline_name} analysis pipeline. For information about how to ' - f'interpret these results, please see the documentation.' - ): - raise AssertionError() - except (AssertionError, KeyError, TypeError): - failed.append("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + version = self.nf_config.get("manifest.version", "").strip(" '\"") + if "dev" in version: + version = "dev" + report_comments = ( + f'This report has been generated by the nf-core/{self.pipeline_name}' + f" analysis pipeline. For information about how to interpret these results, please see the " + f'documentation.' + ) + + else: + report_comments = ( + f'This report has been generated by the nf-core/{self.pipeline_name}' + f" analysis pipeline. For information about how to interpret these results, please see the " + f'documentation.' + ) + + if mqc_yml["report_comment"].strip() != report_comments: + # find where the report_comment is wrong and give it as a hint + hint = report_comments + failed.append( + f"'assets/multiqc_config.yml' does not contain a matching 'report_comment'. \n" + f"The expected comment is: \n" + f"```{hint}``` \n" + f"The current comment is: \n" + f"```{ mqc_yml['report_comment'].strip()}```" + ) else: passed.append("'assets/multiqc_config.yml' contains a matching 'report_comment'.") # Check that export_plots is activated try: - if "export_plots" not in mqc_yml: - raise AssertionError() if not mqc_yml["export_plots"]: raise AssertionError() except (AssertionError, KeyError, TypeError): diff --git a/nf_core/lint/nextflow_config.py b/nf_core/lint/nextflow_config.py index 71c908b7b5..24f1e5c12f 100644 --- a/nf_core/lint/nextflow_config.py +++ b/nf_core/lint/nextflow_config.py @@ -203,6 +203,8 @@ def nextflow_config(self): # Check the variables that should be set to 'true' for k in ["timeline.enabled", "report.enabled", "trace.enabled", "dag.enabled"]: + if k in ignore_configs: + continue if self.nf_config.get(k) == "true": passed.append(f"Config ``{k}`` had correct value: ``{self.nf_config.get(k)}``") else: diff --git a/nf_core/module-template/modules/environment.yml b/nf_core/module-template/environment.yml similarity index 100% rename from nf_core/module-template/modules/environment.yml rename to nf_core/module-template/environment.yml diff --git a/nf_core/module-template/modules/main.nf b/nf_core/module-template/main.nf similarity index 96% rename from nf_core/module-template/modules/main.nf rename to nf_core/module-template/main.nf index a2cabfd2f4..5258403e8f 100644 --- a/nf_core/module-template/modules/main.nf +++ b/nf_core/module-template/main.nf @@ -27,7 +27,7 @@ process {{ component_name_underscore|upper }} { // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. 
// TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. {% endif -%} - conda '${modulesDir}/environment.yml' + conda "${moduleDir}/environment.yml" container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? '{{ singularity_container if singularity_container else 'https://depot.galaxyproject.org/singularity/YOUR-TOOL-HERE' }}': '{{ docker_container if docker_container else 'biocontainers/YOUR-TOOL-HERE' }}' }" @@ -91,7 +91,7 @@ process {{ component_name_underscore|upper }} { cat <<-END_VERSIONS > versions.yml "${task.process}": - {{ component }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + {{ component }}: \$(samtools --version |& sed '1!d ; s/samtools //') END_VERSIONS """ @@ -113,7 +113,7 @@ process {{ component_name_underscore|upper }} { cat <<-END_VERSIONS > versions.yml "${task.process}": - {{ component }}: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + {{ component }}: \$(samtools --version |& sed '1!d ; s/samtools //') END_VERSIONS """ } diff --git a/nf_core/module-template/modules/meta.yml b/nf_core/module-template/meta.yml similarity index 100% rename from nf_core/module-template/modules/meta.yml rename to nf_core/module-template/meta.yml diff --git a/nf_core/module-template/modules/tests/main.nf.test b/nf_core/module-template/tests/main.nf.test similarity index 95% rename from nf_core/module-template/modules/tests/main.nf.test rename to nf_core/module-template/tests/main.nf.test index 883a3ffa40..105c70eb96 100644 --- a/nf_core/module-template/modules/tests/main.nf.test +++ b/nf_core/module-template/tests/main.nf.test @@ -40,7 +40,7 @@ nextflow_process { then { assertAll( { assert process.success }, - { assert snapshot(process.out.versions).match("versions") } + { assert snapshot(process.out).match() } //TODO nf-core: Add all required assertions to verify the test output. 
            )
        }
diff --git a/nf_core/module-template/modules/tests/tags.yml b/nf_core/module-template/tests/tags.yml
similarity index 100%
rename from nf_core/module-template/modules/tests/tags.yml
rename to nf_core/module-template/tests/tags.yml
diff --git a/nf_core/modules/bump_versions.py b/nf_core/modules/bump_versions.py
index cb77d4c043..25259f1a16 100644
--- a/nf_core/modules/bump_versions.py
+++ b/nf_core/modules/bump_versions.py
@@ -49,7 +49,7 @@ def __init__(
         self.tools_config: Dict[str, Any] = {}

     def bump_versions(
-        self, module: Union[NFCoreComponent, None] = None, all_modules: bool = False, show_uptodate: bool = False
+        self, module: Union[str, None] = None, all_modules: bool = False, show_uptodate: bool = False
     ) -> None:
         """
         Bump the container and conda version of single module or all modules
diff --git a/nf_core/modules/create.py b/nf_core/modules/create.py
index b5368130ce..a5e0795a9f 100644
--- a/nf_core/modules/create.py
+++ b/nf_core/modules/create.py
@@ -17,6 +17,7 @@ def __init__(
         conda_name=None,
         conda_version=None,
         empty_template=False,
+        migrate_pytest=False,
     ):
         super().__init__(
             "modules",
@@ -29,4 +30,5 @@
             conda_name,
             conda_version,
             empty_template,
+            migrate_pytest,
         )
diff --git a/nf_core/modules/lint/environment_yml.py b/nf_core/modules/lint/environment_yml.py
index d9c0cbf7f9..a052425539 100644
--- a/nf_core/modules/lint/environment_yml.py
+++ b/nf_core/modules/lint/environment_yml.py
@@ -32,7 +32,7 @@ def environment_yml(module_lint_object: ComponentLint, module: NFCoreComponent)
     # check if the module's main.nf requires a conda environment
     with open(Path(module.component_dir, "main.nf"), "r") as fh:
         main_nf = fh.read()
-        if "conda '${modulesDir}/environment.yml'" in main_nf:
+        if 'conda "${moduleDir}/environment.yml"' in main_nf:
             module.failed.append(
                 ("environment_yml_exists", "Module's `environment.yml` does not exist", module.environment_yml)
             )
diff --git a/nf_core/modules/lint/meta_yml.py b/nf_core/modules/lint/meta_yml.py
index 6bdc4ed223..7552c1ceae 100644
--- a/nf_core/modules/lint/meta_yml.py
+++ b/nf_core/modules/lint/meta_yml.py
@@ -22,7 +22,24 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None
     and module input is consistent between the
     ``meta.yml`` and the ``main.nf``.

+    If the module has inputs or outputs, they are expected to be
+    formatted as:
+
+    .. code-block::
+        tuple val(foo) path(bar)
+        val foo
+        path foo
+
+    or permutations of the above.
+
+    Args:
+        module_lint_object (ComponentLint): The lint object for the module
+        module (NFCoreComponent): The module to lint
+
     """
+
+    module.get_inputs_from_main_nf()
+    module.get_outputs_from_main_nf()
     # Check if we have a patch file, get original file in that case
     meta_yaml = None
     if module.is_patched:
@@ -45,14 +62,14 @@
         return

     # Confirm that the meta.yml file is valid according to the JSON schema
-    valid_meta_yml = True
+    valid_meta_yml = False
     try:
         with open(Path(module_lint_object.modules_repo.local_repo_dir, "modules/meta-schema.json"), "r") as fh:
             schema = json.load(fh)
         validators.validate(instance=meta_yaml, schema=schema)
         module.passed.append(("meta_yml_valid", "Module `meta.yml` is valid", module.meta_yml))
+        valid_meta_yml = True
     except exceptions.ValidationError as e:
-        valid_meta_yml = False
         hint = ""
         if len(e.path) > 0:
             hint = f"\nCheck the entry for `{e.path[0]}`."
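For reference between the two meta_yml.py hunks, the validate-and-hint pattern above can be exercised in isolation. The schema and document below are invented stand-ins for `modules/meta-schema.json` and a module's parsed `meta.yml`; only the try/except shape mirrors the lint code:

```python
import json
from jsonschema import validators, exceptions

# Invented stand-in for modules/meta-schema.json.
schema = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "input": {"type": "array"},
    },
    "required": ["name"],
}

# Invented, deliberately invalid stand-in for a parsed meta.yml.
meta_yaml = {"name": "fastqc", "input": "not-a-list"}

try:
    validators.validate(instance=meta_yaml, schema=schema)
    print("meta.yml is valid")
except exceptions.ValidationError as e:
    hint = ""
    if len(e.path) > 0:
        # e.path is a deque of keys leading to the offending entry,
        # so the first element names the bad top-level field.
        hint = f"\nCheck the entry for `{e.path[0]}`."
    print(f"meta.yml is invalid: {e.message}{hint}")
```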
@@ -79,26 +96,87 @@ def meta_yml(module_lint_object: ComponentLint, module: NFCoreComponent) -> None meta_input = [list(x.keys())[0] for x in meta_yaml["input"]] for input in module.inputs: if input in meta_input: - module.passed.append(("meta_input", f"`{input}` specified", module.meta_yml)) + module.passed.append(("meta_input_main_only", f"`{input}` specified", module.meta_yml)) + else: + module.warned.append( + ( + "meta_input_main_only", + f"`{input}` is present as an input in the `main.nf`, but missing in `meta.yml`", + module.meta_yml, + ) + ) + # check if there are any inputs in meta.yml that are not in main.nf + for input in meta_input: + if input in module.inputs: + module.passed.append( + ( + "meta_input_meta_only", + f"`{input}` is present as an input in `meta.yml` and `main.nf`", + module.meta_yml, + ) + ) else: - module.failed.append(("meta_input", f"`{input}` missing in `meta.yml`", module.meta_yml)) + module.warned.append( + ( + "meta_input_meta_only", + f"`{input}` is present as an input in `meta.yml` but not in `main.nf`", + module.meta_yml, + ) + ) - if "output" in meta_yaml: + if "output" in meta_yaml and meta_yaml["output"] is not None: meta_output = [list(x.keys())[0] for x in meta_yaml["output"]] for output in module.outputs: if output in meta_output: - module.passed.append(("meta_output", f"`{output}` specified", module.meta_yml)) + module.passed.append(("meta_output_main_only", f"`{output}` specified", module.meta_yml)) else: - module.failed.append(("meta_output", f"`{output}` missing in `meta.yml`", module.meta_yml)) - + module.warned.append( + ( + "meta_output_main_only", + f"`{output}` is present as an output in the `main.nf`, but missing in `meta.yml`", + module.meta_yml, + ) + ) + # check if there are any outputs in meta.yml that are not in main.nf + for output in meta_output: + if output in module.outputs: + module.passed.append( + ( + "meta_output_meta_only", + f"`{output}` is present as an output in `meta.yml` and `main.nf`", + module.meta_yml, + ) + ) + elif output == "meta": + module.passed.append( + ( + "meta_output_meta_only", + f"`{output}` is skipped for `meta.yml` outputs", + module.meta_yml, + ) + ) + else: + module.warned.append( + ( + "meta_output_meta_only", + f"`{output}` is present as an output in `meta.yml` but not in `main.nf`", + module.meta_yml, + ) + ) # confirm that the name matches the process name in main.nf if meta_yaml["name"].upper() == module.process_name: - module.passed.append(("meta_name", "Correct name specified in `meta.yml`", module.meta_yml)) + module.passed.append( + ( + "meta_name", + "Correct name specified in `meta.yml`.", + module.meta_yml, + ) + ) else: module.failed.append( ( "meta_name", - f"Conflicting process name between meta.yml (`{meta_yaml['name']}`) and main.nf (`{module.process_name}`)", + f"Conflicting `process` name between meta.yml (`{meta_yaml['name']}`) and main.nf (`{module.process_name}`)", module.meta_yml, ) ) diff --git a/nf_core/modules/lint/module_tests.py b/nf_core/modules/lint/module_tests.py index 8188787f90..87033e3f49 100644 --- a/nf_core/modules/lint/module_tests.py +++ b/nf_core/modules/lint/module_tests.py @@ -1,6 +1,7 @@ """ Lint the tests of a module in nf-core/modules """ +import json import logging from pathlib import Path @@ -34,12 +35,12 @@ def module_tests(_, module: NFCoreComponent): # Lint the test main.nf file if module.nftest_main_nf.is_file(): - module.passed.append(("test_main_exists", "test `main.nf.test` exists", module.nftest_main_nf)) + 
module.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", module.nftest_main_nf)) else: if is_pytest: - module.warned.append(("test_main_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + module.warned.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) else: - module.failed.append(("test_main_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) + module.failed.append(("test_main_nf_exists", "test `main.nf.test` does not exist", module.nftest_main_nf)) if module.nftest_main_nf.is_file(): # Check if main.nf.test.snap file exists, if 'snap(' is inside main.nf.test @@ -52,36 +53,64 @@ def module_tests(_, module: NFCoreComponent): ) # Validate no empty files with open(snap_file, "r") as snap_fh: - snap_content = snap_fh.read() - if "d41d8cd98f00b204e9800998ecf8427e" in snap_content: + try: + snap_content = json.load(snap_fh) + for test_name in snap_content.keys(): + if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): + if "stub" not in test_name: + module.failed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found, but it is a stub test", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "no md5sum for empty file found", + snap_file, + ) + ) + if "7029066c27ac6f5ef18d660d5741979a" in str(snap_content[test_name]): + if "stub" not in test_name: + module.failed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found, but it is a stub test", + snap_file, + ) + ) + else: + module.passed.append( + ( + "test_snap_md5sum", + "no md5sum for compressed empty file found", + snap_file, + ) + ) + except json.decoder.JSONDecodeError as e: module.failed.append( ( - "test_snap_md5sum", - "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", - snap_file, - ) - ) - else: - module.passed.append( - ( - "test_snap_md5sum", - "no md5sum for empty file found", - snap_file, - ) - ) - if "7029066c27ac6f5ef18d660d5741979a" in snap_content: - module.failed.append( - ( - "test_snap_md5sum", - "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", - snap_file, - ) - ) - else: - module.passed.append( - ( - "test_snap_md5sum", - "no md5sum for compressed empty file found", + "test_snapshot_exists", + f"snapshot file `main.nf.test.snap` can't be read: {e}", snap_file, ) ) @@ -94,8 +123,11 @@ def module_tests(_, module: NFCoreComponent): required_tags = ["modules", "modules_nfcore", module.component_name] if module.component_name.count("/") == 1: required_tags.append(module.component_name.split("/")[0]) + chained_components_tags = module._get_included_components_in_chained_tests(module.nftest_main_nf) missing_tags = [] - for tag in required_tags: + log.debug(f"Required tags: {required_tags}") + log.debug(f"Included components for chained nf-tests: {chained_components_tags}") + for tag in set(required_tags + chained_components_tags): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: @@ -104,7 +136,7 @@ def module_tests(_, module: NFCoreComponent): module.failed.append( ( "test_main_tags", - f"Tags do not adhere to guidelines. 
Tags missing in `main.nf.test`: {missing_tags}",
+                    f"Tags do not adhere to guidelines. Tags missing in `main.nf.test`: `{','.join(missing_tags)}`",
                     module.nftest_main_nf,
                 )
             )
@@ -140,12 +172,18 @@
             if f"modules/{module.org}/{module.component_name}/**" in tags_yml[module.component_name]:
                 module.passed.append(("test_tags_yml", "correct path in tags.yml", module.tags_yml))
             else:
-                module.failed.append(("test_tags_yml", "incorrect path in tags.yml", module.tags_yml))
+                module.failed.append(
+                    (
+                        "test_tags_yml",
+                        f"incorrect path in tags.yml, expected `modules/{module.org}/{module.component_name}/**`, got `{tags_yml[module.component_name][0]}`",
+                        module.tags_yml,
+                    )
+                )
         else:
             module.failed.append(
                 (
                     "test_tags_yml",
-                    "incorrect entry in tags.yml, should be '' or '/'",
+                    f"incorrect key in tags.yml, should be `{module.component_name}`, got `{list(tags_yml.keys())[0]}`.",
                     module.tags_yml,
                 )
             )
@@ -154,3 +192,17 @@
             module.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml))
         else:
             module.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", module.tags_yml))
+
+    # Check that the old test directory does not exist
+    if not is_pytest:
+        old_test_dir = Path(module.base_dir, "tests", "modules", module.component_name)
+        if old_test_dir.is_dir():
+            module.failed.append(
+                (
+                    "test_old_test_dir",
+                    f"Pytest files are still present at `{Path('tests', 'modules', module.component_name)}`. Please remove this directory and its contents.",
+                    old_test_dir,
+                )
+            )
+        else:
+            module.passed.append(("test_old_test_dir", "Old pytests don't exist for this module", old_test_dir))
diff --git a/nf_core/modules/modules_json.py b/nf_core/modules/modules_json.py
index 9c3d1ae9b1..ee912843b6 100644
--- a/nf_core/modules/modules_json.py
+++ b/nf_core/modules/modules_json.py
@@ -6,9 +6,11 @@ import shutil
 import tempfile
 from pathlib import Path
+from typing import Union

 import git
 import questionary
+import rich.prompt
 from git.exc import GitCommandError

 import nf_core.utils
@@ -41,7 +43,7 @@ def __init__(self, pipeline_dir):
         self.modules_dir = Path(self.dir, "modules")
         self.subworkflows_dir = Path(self.dir, "subworkflows")
         self.modules_json_path = Path(self.dir, "modules.json")
-        self.modules_json = None
+        self.modules_json: Union[dict, None] = None
         self.pipeline_modules = None
         self.pipeline_subworkflows = None
         self.pipeline_components = None
@@ -67,7 +69,13 @@ def create(self):
         new_modules_json = {"name": pipeline_name.strip("'"), "homePage": pipeline_url.strip("'"), "repos": {}}

         if not self.modules_dir.exists():
-            raise UserWarning("Can't find a ./modules directory. Is this a DSL2 pipeline?")
+            if rich.prompt.Confirm.ask(
+                "[bold][blue]?[/] Can't find a ./modules directory.
Would you like me to create one?", default=True + ): + log.info(f"Creating ./modules directory in '{self.dir}'") + self.modules_dir.mkdir() + else: + raise UserWarning("Cannot proceed without a ./modules directory.") # Get repositories repos, _ = self.get_pipeline_module_repositories("modules", self.modules_dir) @@ -678,7 +686,7 @@ def update( repo_component_entry[component_name]["installed_by"] = [installed_by] finally: new_installed_by = repo_component_entry[component_name]["installed_by"] + list(installed_by_log) - repo_component_entry[component_name]["installed_by"] = [*set(new_installed_by)] + repo_component_entry[component_name]["installed_by"] = sorted([*set(new_installed_by)]) # Sort the 'modules.json' repo entries self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) @@ -1035,13 +1043,17 @@ def get_component_branch(self, component_type, component, repo_url, install_dir) ) return branch - def dump(self): + def dump(self, run_prettier: bool = False): """ Sort the modules.json, and write it to file """ # Sort the modules.json self.modules_json["repos"] = nf_core.utils.sort_dictionary(self.modules_json["repos"]) - dump_json_with_prettier(self.modules_json_path, self.modules_json) + if run_prettier: + dump_json_with_prettier(self.modules_json_path, self.modules_json) + else: + with open(self.modules_json_path, "w") as fh: + json.dump(self.modules_json, fh, indent=4) def resolve_missing_installation(self, missing_installation, component_type): missing_but_in_mod_json = [ diff --git a/nf_core/modules/modules_utils.py b/nf_core/modules/modules_utils.py index 504cb1095d..3ae01e9eef 100644 --- a/nf_core/modules/modules_utils.py +++ b/nf_core/modules/modules_utils.py @@ -37,7 +37,7 @@ def repo_full_name_from_remote(remote_url: str) -> str: return path -def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[str]]: +def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], List[NFCoreComponent]]: """ Make a list of all modules installed in this repository @@ -52,7 +52,7 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis """ # initialize lists local_modules: List[str] = [] - nfcore_modules: List[str] = [] + nfcore_modules_names: List[str] = [] local_modules_dir: Optional[str] = None nfcore_modules_dir = os.path.join(dir, "modules", "nf-core") @@ -76,9 +76,9 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis # Not a module, but contains sub-modules if not "main.nf" in m_content: for tool in m_content: - nfcore_modules.append(os.path.join(m, tool)) + nfcore_modules_names.append(os.path.join(m, tool)) else: - nfcore_modules.append(m) + nfcore_modules_names.append(m) # Make full (relative) file paths and create NFCoreComponent objects if local_modules_dir: @@ -93,7 +93,7 @@ def get_installed_modules(dir: str, repo_type="modules") -> Tuple[List[str], Lis base_dir=Path(dir), component_type="modules", ) - for m in nfcore_modules + for m in nfcore_modules_names ] return local_modules, nfcore_modules diff --git a/nf_core/params_file.py b/nf_core/params_file.py index 39986b95c2..5c50c53fb9 100644 --- a/nf_core/params_file.py +++ b/nf_core/params_file.py @@ -89,7 +89,7 @@ def __init__( self, pipeline=None, revision=None, - ): + ) -> None: """Initialise the ParamFileBuilder class Args: diff --git a/nf_core/pipeline-template/.github/workflows/ci.yml b/nf_core/pipeline-template/.github/workflows/ci.yml index 521f3e664a..3edd49f09d 100644 --- 
a/nf_core/pipeline-template/.github/workflows/ci.yml +++ b/nf_core/pipeline-template/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: - "latest-everything" steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 diff --git a/nf_core/pipeline-template/.github/workflows/fix-linting.yml b/nf_core/pipeline-template/.github/workflows/fix-linting.yml index f3dc3e50fe..31e8cd2b36 100644 --- a/nf_core/pipeline-template/.github/workflows/fix-linting.yml +++ b/nf_core/pipeline-template/.github/workflows/fix-linting.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: # Use the @nf-core-bot token to check out so we can push later - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: token: ${{ secrets.nf_core_bot_auth_token }} @@ -24,7 +24,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.nf_core_bot_auth_token }} - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install Prettier run: npm install -g prettier @prettier/plugin-php diff --git a/nf_core/pipeline-template/.github/workflows/linting.yml b/nf_core/pipeline-template/.github/workflows/linting.yml index 29fc466ed6..64d1851f2e 100644 --- a/nf_core/pipeline-template/.github/workflows/linting.yml +++ b/nf_core/pipeline-template/.github/workflows/linting.yml @@ -14,9 +14,9 @@ jobs: EditorConfig: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install editorconfig-checker run: npm install -g editorconfig-checker @@ -27,9 +27,9 @@ jobs: Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 - name: Install Prettier run: npm install -g prettier @@ -40,7 +40,7 @@ jobs: PythonBlack: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Check code lints with Black uses: psf/black@stable @@ -71,14 +71,14 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out pipeline code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Nextflow uses: nf-core/setup-nextflow@v1 - uses: actions/setup-python@v4 with: - python-version: "3.11" + python-version: "3.12" architecture: "x64" - name: Install dependencies diff --git a/nf_core/pipeline-template/.gitpod.yml b/nf_core/pipeline-template/.gitpod.yml index 25488dcc08..acf7269536 100644 --- a/nf_core/pipeline-template/.gitpod.yml +++ b/nf_core/pipeline-template/.gitpod.yml @@ -4,7 +4,9 @@ tasks: command: | pre-commit install --install-hooks nextflow self-update - + - name: unset JAVA_TOOL_OPTIONS + command: | + unset JAVA_TOOL_OPTIONS vscode: extensions: # based on nf-core.nf-core-extensionpack - codezombiech.gitignore # Language support for .gitignore files diff --git a/nf_core/pipeline-template/assets/multiqc_config.yml b/nf_core/pipeline-template/assets/multiqc_config.yml index 9ab59067a1..39943ffe49 100644 --- a/nf_core/pipeline-template/assets/multiqc_config.yml +++ b/nf_core/pipeline-template/assets/multiqc_config.yml @@ -1,6 +1,6 @@ report_comment: > {% if 'dev' in version -%} - This report has been generated by the {{ name }} + This report has been generated by the {{ name }} analysis pipeline.{% if branded %} For information about how to interpret these results, please see the documentation.{% endif %} {%- else %} diff --git a/nf_core/pipeline-template/assets/slackreport.json 
b/nf_core/pipeline-template/assets/slackreport.json index ec03b3968a..96d2cb8afc 100644 --- a/nf_core/pipeline-template/assets/slackreport.json +++ b/nf_core/pipeline-template/assets/slackreport.json @@ -3,7 +3,7 @@ { "fallback": "Plain-text summary of the attachment.", "color": "<% if (success) { %>good<% } else { %>danger<%} %>", - "author_name": "{{ name }} v${version} - ${runName}", + "author_name": "{{ name }} ${version} - ${runName}", "author_icon": "https://www.nextflow.io/docs/latest/_static/favicon.ico", "text": "<% if (success) { %>Pipeline completed successfully!<% } else { %>Pipeline completed with errors<% } %>", "fields": [ diff --git a/nf_core/pipeline-template/nextflow.config b/nf_core/pipeline-template/nextflow.config index 20ec78b5fc..20423a5f2e 100644 --- a/nf_core/pipeline-template/nextflow.config +++ b/nf_core/pipeline-template/nextflow.config @@ -121,6 +121,7 @@ profiles { shifter.enabled = false charliecloud.enabled = false apptainer.enabled = false + runOptions = '-u $(id -u):$(id -g)' } arm { docker.runOptions = '-u $(id -u):$(id -g) --platform=linux/amd64' diff --git a/nf_core/schema.py b/nf_core/schema.py index b00697334b..7e4726f189 100644 --- a/nf_core/schema.py +++ b/nf_core/schema.py @@ -35,7 +35,7 @@ def __init__(self): self.pipeline_dir = None self.schema_filename = None self.schema_defaults = {} - self.schema_params = [] + self.schema_params = {} self.input_params = {} self.pipeline_params = {} self.invalid_nextflow_config_default_parameters = {} @@ -110,7 +110,7 @@ def load_schema(self): with open(self.schema_filename, "r") as fh: self.schema = json.load(fh) self.schema_defaults = {} - self.schema_params = [] + self.schema_params = {} log.debug(f"JSON file loaded: {self.schema_filename}") def sanitise_param_default(self, param): @@ -141,6 +141,9 @@ def sanitise_param_default(self, param): param["default"] = float(param["default"]) return param + if param["default"] is None: + return param + # Strings param["default"] = str(param["default"]) return param @@ -154,18 +157,20 @@ def get_schema_defaults(self): """ # Top level schema-properties (ungrouped) for p_key, param in self.schema.get("properties", {}).items(): - self.schema_params.append(p_key) + self.schema_params[p_key] = ("properties", p_key) if "default" in param: param = self.sanitise_param_default(param) - self.schema_defaults[p_key] = param["default"] + if param["default"] is not None: + self.schema_defaults[p_key] = param["default"] # Grouped schema properties in subschema definitions - for _, definition in self.schema.get("definitions", {}).items(): + for defn_name, definition in self.schema.get("definitions", {}).items(): for p_key, param in definition.get("properties", {}).items(): - self.schema_params.append(p_key) + self.schema_params[p_key] = ("definitions", defn_name, "properties", p_key) if "default" in param: param = self.sanitise_param_default(param) - self.schema_defaults[p_key] = param["default"] + if param["default"] is not None: + self.schema_defaults[p_key] = param["default"] def save_schema(self, suppress_logging=False): """Save a pipeline schema to a file""" @@ -239,9 +244,9 @@ def validate_default_params(self): except jsonschema.exceptions.ValidationError as e: raise AssertionError(f"Default parameters are invalid: {e.message}") for param, default in self.schema_defaults.items(): - if default in ("null", "", None, "None"): + if default in ("null", "", None, "None") or default is False: log.warning( - f"[yellow][!] Default parameter '{param}' is empty or null. 
It is advisable to remove the default from the schema"
+                    f"[yellow][!] Default parameter '{param}' is empty, null, or False. It is advisable to remove the default from the schema"
                 )
         log.info("[green][✓] Default parameters match schema validation")
@@ -762,12 +767,15 @@ def prompt_remove_schema_notfound_config(self, p_key):
     def add_schema_found_configs(self):
         """
         Add anything that's found in the Nextflow params that's missing in the pipeline schema
+        Update defaults if they have changed
         """
         params_added = []
         params_ignore = self.pipeline_params.get("validationSchemaIgnoreParams", "").strip("\"'").split(",")
         params_ignore.append("validationSchemaIgnoreParams")
         for p_key, p_val in self.pipeline_params.items():
+            s_key = self.schema_params.get(p_key)  # Check if key is in schema parameters
+            # Key is in pipeline but not in schema or ignored from schema
             if p_key not in self.schema_params and p_key not in params_ignore:
                 if (
                     self.no_prompts
@@ -782,7 +790,35 @@
                     self.schema["properties"][p_key] = self.build_schema_param(p_val)
                     log.debug(f"Adding '{p_key}' to pipeline schema")
                     params_added.append(p_key)
-
+            # Param has a default that does not match the schema
+            elif p_key in self.schema_defaults and (s_def := self.schema_defaults[p_key]) != (
+                p_def := self.build_schema_param(p_val).get("default")
+            ):
+                if self.no_prompts or Confirm.ask(
+                    f":sparkles: Default for [bold]'params.{p_key}'[/] in the pipeline config does not match schema. (schema: '{s_def}' | config: '{p_def}'). "
+                    "[blue]Update pipeline schema?"
+                ):
+                    s_key_def = s_key + ("default",)
+                    if p_def is None:
+                        nf_core.utils.nested_delitem(self.schema, s_key_def)
+                        log.debug(f"Removed '{p_key}' default from pipeline schema")
+                    else:
+                        nf_core.utils.nested_setitem(self.schema, s_key_def, p_def)
+                        log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema")
+            # There is no default in schema but now there is a default to write
+            elif (
+                s_key
+                and (p_key not in self.schema_defaults)
+                and (p_key not in params_ignore)
+                and (p_def := self.build_schema_param(p_val).get("default"))
+            ):
+                if self.no_prompts or Confirm.ask(
+                    f":sparkles: Default for [bold]'params.{p_key}'[/] is not in schema (def='{p_def}'). "
+                    "[blue]Update pipeline schema?"
+                ):
+                    s_key_def = s_key + ("default",)
+                    nf_core.utils.nested_setitem(self.schema, s_key_def, p_def)
+                    log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema")
         return params_added

     def build_schema_param(self, p_val):
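The schema.py hunk above relies on `nf_core.utils.nested_setitem` and `nested_delitem`, which this diff does not show. A hypothetical sketch of their likely behaviour, using the key tuples that `get_schema_defaults` now records (the helper bodies here are an assumption, not the shipped implementations in nf_core/utils.py):

```python
# Hypothetical sketches of the nested_setitem/nested_delitem helpers
# referenced above; the real versions live in nf_core/utils.py.
def nested_setitem(d: dict, keys: tuple, value) -> None:
    """Set d[k1][k2]...[kn] = value, walking the key tuple."""
    for key in keys[:-1]:
        d = d[key]
    d[keys[-1]] = value


def nested_delitem(d: dict, keys: tuple) -> None:
    """Delete d[k1][k2]...[kn], walking the key tuple."""
    for key in keys[:-1]:
        d = d[key]
    del d[keys[-1]]


# The s_key tuples built in get_schema_defaults() address a parameter either
# at the top level ("properties", p_key) or inside a definition group:
schema = {"definitions": {"input_options": {"properties": {"input": {"type": "string"}}}}}
s_key = ("definitions", "input_options", "properties", "input")
nested_setitem(schema, s_key + ("default",), "samplesheet.csv")
print(schema["definitions"]["input_options"]["properties"]["input"])
# {'type': 'string', 'default': 'samplesheet.csv'}
```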
+ ): + s_key_def = s_key + ("default",) + nf_core.utils.nested_setitem(self.schema, s_key_def, p_def) + log.debug(f"Updating '{p_key}' default to '{p_def}' in pipeline schema") return params_added def build_schema_param(self, p_val): @@ -806,13 +842,15 @@ def build_schema_param(self, p_val): p_val = None # Booleans - if p_val in ["True", "False"]: - p_val = p_val == "True" # Convert to bool + if p_val in ["true", "false", "True", "False"]: + p_val = p_val in ["true", "True"] # Convert to bool p_type = "boolean" - p_schema = {"type": p_type, "default": p_val} + # Don't return a default for anything false-y except 0 + if not p_val and not (p_val == 0 and p_val is not False): + return {"type": p_type} - return p_schema + return {"type": p_type, "default": p_val} def launch_web_builder(self): """ diff --git a/nf_core/subworkflow-template/subworkflows/main.nf b/nf_core/subworkflow-template/main.nf similarity index 100% rename from nf_core/subworkflow-template/subworkflows/main.nf rename to nf_core/subworkflow-template/main.nf diff --git a/nf_core/subworkflow-template/subworkflows/meta.yml b/nf_core/subworkflow-template/meta.yml similarity index 100% rename from nf_core/subworkflow-template/subworkflows/meta.yml rename to nf_core/subworkflow-template/meta.yml diff --git a/nf_core/subworkflow-template/subworkflows/tests/main.nf.test b/nf_core/subworkflow-template/tests/main.nf.test similarity index 98% rename from nf_core/subworkflow-template/subworkflows/tests/main.nf.test rename to nf_core/subworkflow-template/tests/main.nf.test index b0a212f2a3..59c4fdb679 100644 --- a/nf_core/subworkflow-template/subworkflows/tests/main.nf.test +++ b/nf_core/subworkflow-template/tests/main.nf.test @@ -8,7 +8,6 @@ nextflow_workflow { tag "subworkflows" tag "subworkflows_nfcore" - tag "{{ component_name }}" tag "subworkflows/{{ component_name }}" // TODO nf-core: Add tags for all modules used within this subworkflow. 
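
The `nf_core/schema.py` changes above switch `self.schema_params` from a flat list to a dict of key paths, so that `add_schema_found_configs()` can update or delete a parameter's default wherever it sits in the schema. A minimal sketch of that key-path pattern, using a toy schema dict (not a real pipeline schema) and the `nested_setitem`/`nested_delitem` helpers this diff adds to `nf_core/utils.py` further down:

```python
from nf_core.utils import nested_delitem, nested_setitem

# Toy schema with one grouped parameter, as stored in nextflow_schema.json
schema = {
    "definitions": {
        "input_output_options": {
            "properties": {"outdir": {"type": "string", "default": "./results"}},
        },
    },
}

# get_schema_defaults() now records where each parameter lives:
s_key = ("definitions", "input_output_options", "properties", "outdir")

# Pipeline config default changed -> overwrite the schema default in place
nested_setitem(schema, s_key + ("default",), "./my-results")

# Pipeline config default removed -> drop the default key from the schema
nested_delitem(schema, s_key + ("default",))
assert "default" not in schema["definitions"]["input_output_options"]["properties"]["outdir"]
```
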
Example: tag "samtools" diff --git a/nf_core/subworkflow-template/subworkflows/tests/tags.yml b/nf_core/subworkflow-template/tests/tags.yml similarity index 100% rename from nf_core/subworkflow-template/subworkflows/tests/tags.yml rename to nf_core/subworkflow-template/tests/tags.yml diff --git a/nf_core/subworkflows/create.py b/nf_core/subworkflows/create.py index 963076455e..93e9f271be 100644 --- a/nf_core/subworkflows/create.py +++ b/nf_core/subworkflows/create.py @@ -12,6 +12,7 @@ def __init__( component="", author=None, force=False, + migrate_pytest=False, ): super().__init__( "subworkflows", @@ -19,4 +20,5 @@ def __init__( component, author, force=force, + migrate_pytest=migrate_pytest, ) diff --git a/nf_core/subworkflows/lint/__init__.py b/nf_core/subworkflows/lint/__init__.py index 44c7c21a37..ffba41f9da 100644 --- a/nf_core/subworkflows/lint/__init__.py +++ b/nf_core/subworkflows/lint/__init__.py @@ -29,12 +29,12 @@ class SubworkflowLint(ComponentLint): """ # Import lint functions - from .main_nf import main_nf - from .meta_yml import meta_yml - from .subworkflow_changes import subworkflow_changes - from .subworkflow_tests import subworkflow_tests - from .subworkflow_todos import subworkflow_todos - from .subworkflow_version import subworkflow_version + from .main_nf import main_nf # type: ignore[misc] + from .meta_yml import meta_yml # type: ignore[misc] + from .subworkflow_changes import subworkflow_changes # type: ignore[misc] + from .subworkflow_tests import subworkflow_tests # type: ignore[misc] + from .subworkflow_todos import subworkflow_todos # type: ignore[misc] + from .subworkflow_version import subworkflow_version # type: ignore[misc] def __init__( self, diff --git a/nf_core/subworkflows/lint/subworkflow_tests.py b/nf_core/subworkflows/lint/subworkflow_tests.py index 8e76c03ceb..1ebced6d42 100644 --- a/nf_core/subworkflows/lint/subworkflow_tests.py +++ b/nf_core/subworkflows/lint/subworkflow_tests.py @@ -1,6 +1,7 @@ """ Lint the tests of a subworkflow in nf-core/modules """ +import json import logging from pathlib import Path @@ -24,10 +25,10 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): repo_dir = subworkflow.component_dir.parts[ : subworkflow.component_dir.parts.index(subworkflow.component_name.split("/")[0]) ][-1] - test_dir = Path(subworkflow.base_dir, "tests", "subworfklows", repo_dir, subworkflow.component_name) + test_dir = Path(subworkflow.base_dir, "tests", "subworkflows", repo_dir, subworkflow.component_name) pytest_main_nf = Path(test_dir, "main.nf") is_pytest = pytest_main_nf.is_file() - + log.debug(f"{pytest_main_nf} is pytest: {is_pytest}") if subworkflow.nftest_testdir.is_dir(): subworkflow.passed.append(("test_dir_exists", "nf-test test directory exists", subworkflow.nftest_testdir)) else: @@ -39,15 +40,15 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): # Lint the test main.nf file if subworkflow.nftest_main_nf.is_file(): - subworkflow.passed.append(("test_main_exists", "test `main.nf.test` exists", subworkflow.nftest_main_nf)) + subworkflow.passed.append(("test_main_nf_exists", "test `main.nf.test` exists", subworkflow.nftest_main_nf)) else: if is_pytest: subworkflow.warned.append( - ("test_main_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ("test_main_nf_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) ) else: subworkflow.failed.append( - ("test_main_exists", "test `main.nf.test` does not exist", subworkflow.nftest_main_nf) + ("test_main_nf_exists", "test `main.nf.test` 
does not exist", subworkflow.nftest_main_nf) ) if subworkflow.nftest_main_nf.is_file(): @@ -59,36 +60,64 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): subworkflow.passed.append(("test_snapshot_exists", "test `main.nf.test.snap` exists", snap_file)) # Validate no empty files with open(snap_file, "r") as snap_fh: - snap_content = snap_fh.read() - if "d41d8cd98f00b204e9800998ecf8427e" in snap_content: - subworkflow.failed.append( - ( - "test_snap_md5sum", - "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", - snap_file, - ) - ) - else: - subworkflow.passed.append( - ( - "test_snap_md5sum", - "no md5sum for empty file found", - snap_file, - ) - ) - if "7029066c27ac6f5ef18d660d5741979a" in snap_content: + try: + snap_content = json.load(snap_fh) + for test_name in snap_content.keys(): + if "d41d8cd98f00b204e9800998ecf8427e" in str(snap_content[test_name]): + if "stub" not in test_name: + subworkflow.failed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found: d41d8cd98f00b204e9800998ecf8427e", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "md5sum for empty file found, but it is a stub test", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "no md5sum for empty file found", + snap_file, + ) + ) + if "7029066c27ac6f5ef18d660d5741979a" in str(snap_content[test_name]): + if "stub" not in test_name: + subworkflow.failed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "md5sum for compressed empty file found, but it is a stub test", + snap_file, + ) + ) + else: + subworkflow.passed.append( + ( + "test_snap_md5sum", + "no md5sum for compressed empty file found", + snap_file, + ) + ) + except json.decoder.JSONDecodeError as e: subworkflow.failed.append( ( - "test_snap_md5sum", - "md5sum for compressed empty file found: 7029066c27ac6f5ef18d660d5741979a", - snap_file, - ) - ) - else: - subworkflow.passed.append( - ( - "test_snap_md5sum", - "no md5sum for compressed empty file found", + "test_snapshot_exists", + f"snapshot file `main.nf.test.snap` can't be read: {e}", snap_file, ) ) @@ -102,13 +131,16 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): "subworkflows", f"subworkflows/{subworkflow.component_name}", "subworkflows_nfcore", - subworkflow.component_name, ] included_components = [] if subworkflow.main_nf.is_file(): included_components = subworkflow._get_included_components(subworkflow.main_nf) + chained_components_tags = subworkflow._get_included_components_in_chained_tests(subworkflow.nftest_main_nf) + log.debug(f"Included components: {included_components}") + log.debug(f"Required tags: {required_tags}") + log.debug(f"Included components for chained nf-tests: {chained_components_tags}") missing_tags = [] - for tag in required_tags + included_components: + for tag in set(required_tags + included_components + chained_components_tags): if tag not in main_nf_tags: missing_tags.append(tag) if len(missing_tags) == 0: @@ -170,3 +202,11 @@ def subworkflow_tests(_, subworkflow: NFCoreComponent): subworkflow.warned.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) else: subworkflow.failed.append(("test_tags_yml_exists", "file `tags.yml` does not exist", subworkflow.tags_yml)) + + # Check that the old test directory does not exist + if not is_pytest: + old_test_dir = Path(subworkflow.base_dir,
"tests", "subworkflows", subworkflow.component_name) + if old_test_dir.is_dir(): + subworkflow.failed.append(("test_old_test_dir", "old test directory exists", old_test_dir)) + else: + subworkflow.passed.append(("test_old_test_dir", "old test directory does not exist", old_test_dir)) diff --git a/nf_core/synced_repo.py b/nf_core/synced_repo.py index 41e0853f2e..a2107f633c 100644 --- a/nf_core/synced_repo.py +++ b/nf_core/synced_repo.py @@ -3,9 +3,9 @@ import os import shutil from pathlib import Path +from typing import Dict import git -import rich import rich.progress from git.exc import GitCommandError @@ -61,7 +61,7 @@ class SyncedRepo: An object to store details about a locally cached code repository. """ - local_repo_statuses = {} + local_repo_statuses: Dict[str, bool] = {} no_pull_global = False @staticmethod diff --git a/nf_core/utils.py b/nf_core/utils.py index c57990a9d3..bcc8faa3fd 100644 --- a/nf_core/utils.py +++ b/nf_core/utils.py @@ -16,8 +16,9 @@ import subprocess import sys import time +from contextlib import contextmanager from pathlib import Path -from typing import Tuple, Union +from typing import Generator, Tuple, Union import git import prompt_toolkit @@ -1036,12 +1037,12 @@ def load_tools_config(directory: Union[str, Path] = "."): def determine_base_dir(directory="."): base_dir = start_dir = Path(directory).absolute() - while not get_first_available_path(base_dir, CONFIG_PATHS) and base_dir != base_dir.parent: + while base_dir != base_dir.parent: base_dir = base_dir.parent config_fn = get_first_available_path(base_dir, CONFIG_PATHS) if config_fn: - break - return directory if base_dir == start_dir else base_dir + return directory if base_dir == start_dir else base_dir + return directory def get_first_available_path(directory, paths): @@ -1147,3 +1148,48 @@ def validate_file_md5(file_name, expected_md5hex): raise IOError(f"{file_name} md5 does not match remote: {expected_md5hex} - {file_md5hex}") return True + + +def nested_setitem(d, keys, value): + """Sets the value in a nested dict using a list of keys to traverse + + Args: + d (dict): the nested dictionary to traverse + keys (list[Any]): A list of keys to iteratively traverse + value (Any): The value to be set for the last key in the chain + """ + current = d + for k in keys[:-1]: + current = current[k] + current[keys[-1]] = value + + +def nested_delitem(d, keys): + """Deletes a key from a nested dictionary + + Args: + d (dict): the nested dictionary to traverse + keys (list[Any]): A list of keys to iteratively traverse, deleting the final one + """ + current = d + for k in keys[:-1]: + current = current[k] + del current[keys[-1]] + + +@contextmanager +def set_wd(path: Path) -> Generator[None, None, None]: + """Sets the working directory for this context. + + Arguments + --------- + + path : Path + Path to the working directory to be used inside this context. 
+ """ + start_wd = Path().absolute() + os.chdir(Path(path).resolve()) + try: + yield + finally: + os.chdir(start_wd) diff --git a/pyproject.toml b/pyproject.toml index f0702742fd..2380073107 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -20,8 +20,3 @@ norecursedirs = [ ".*", "build", "dist", "*.egg", "data", "__pycache__", ".githu profile = "black" known_first_party = ["nf_core"] multi_line_output = 3 - -[tool.mypy] -ignore_missing_imports = true -follow_imports = "skip" -disable_error_code = "no-redef" diff --git a/requirements-dev.txt b/requirements-dev.txt index c94874b193..13dba6f30d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,4 +8,8 @@ Sphinx sphinx-rtd-theme mypy types-PyYAML +pyupgrade types-requests +types-jsonschema +types-Markdown +types-setuptools diff --git a/tests/components/generate_snapshot.py b/tests/components/generate_snapshot.py index c7eb696722..46fd63fe3f 100644 --- a/tests/components/generate_snapshot.py +++ b/tests/components/generate_snapshot.py @@ -6,8 +6,9 @@ import pytest from nf_core.components.components_test import ComponentsTest +from nf_core.utils import set_wd -from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL, set_wd +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL def test_generate_snapshot_module(self): diff --git a/tests/components/snapshot_test.py b/tests/components/snapshot_test.py index 371f0d6fbe..d774618476 100644 --- a/tests/components/snapshot_test.py +++ b/tests/components/snapshot_test.py @@ -5,8 +5,7 @@ import pytest from nf_core.components.components_test import ComponentsTest - -from ..utils import set_wd +from nf_core.utils import set_wd def test_components_test_check_inputs(self): diff --git a/tests/lint/multiqc_config.py b/tests/lint/multiqc_config.py new file mode 100644 index 0000000000..446b4378b0 --- /dev/null +++ b/tests/lint/multiqc_config.py @@ -0,0 +1,106 @@ +from pathlib import Path + +import yaml + +import nf_core.lint + + +def test_multiqc_config_exists_ignore(self): + """Test that the multiqc_config lint test is ignored if the multiqc_config.yml file is missing""" + # Delete the file + new_pipeline = self._make_pipeline_copy() + Path(Path(new_pipeline, "assets", "multiqc_config.yml")).unlink() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + result = lint_obj.multiqc_config() + assert result["ignored"] == ["'assets/multiqc_config.yml' not found"] + + +def test_multiqc_config_missing_report_section_order(self): + """Test that linting fails if the multiqc_config.yml file is missing the report_section_order""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml.pop("report_section_order") + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["'assets/multiqc_config.yml' does not contain `report_section_order`"] + + +def test_multiqc_incorrect_export_plots(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect value for export_plots""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml["export_plots"] = False
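
Before the remaining multiqc_config tests, a quick usage sketch for the `set_wd()` context manager that this diff promotes into `nf_core/utils.py` above, mirroring the behaviour checked by `test_set_wd` in `tests/test_utils.py` below: it switches the process working directory for the duration of the `with` block and restores the original directory afterwards, even if the block raises.

```python
import tempfile
from pathlib import Path

from nf_core.utils import set_wd

with tempfile.TemporaryDirectory() as tmpdirname:
    with set_wd(tmpdirname):
        # Inside the context, the current working directory is the tmp dir
        assert Path().resolve() == Path(tmpdirname).resolve()
    # Back outside, the original working directory has been restored
```
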
+ with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert result["failed"] == ["'assets/multiqc_config.yml' does not contain 'export_plots: true'."] + + +def test_multiqc_config_report_comment_fail(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + mqc_yml["report_comment"] = "This is a test" + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + + +def test_multiqc_config_report_comment_release_fail(self): + """Test that linting fails if the multiqc_config.yml file has an incorrect report_comment for a release version""" + new_pipeline = self._make_pipeline_copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "r") as fh: + mqc_yml = yaml.safe_load(fh) + mqc_yml_tmp = mqc_yml.copy() + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml, fh) + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + # bump version + lint_obj.nf_config["manifest.version"] = "1.0" + result = lint_obj.multiqc_config() + # Reset the file + with open(Path(new_pipeline, "assets", "multiqc_config.yml"), "w") as fh: + yaml.safe_dump(mqc_yml_tmp, fh) + assert len(result["failed"]) == 1 + assert result["failed"][0].startswith("'assets/multiqc_config.yml' does not contain a matching 'report_comment'.") + + +def test_multiqc_config_report_comment_release_succeed(self): + """Test that linting passes if the multiqc_config.yml file has a correct report_comment for a release version""" + + import nf_core.bump_version + + new_pipeline = self._make_pipeline_copy() + lint_obj = nf_core.lint.PipelineLint(new_pipeline) + lint_obj._load() + # bump version using the bump_version function + nf_core.bump_version.bump_pipeline_version(lint_obj, "1.0") + # lint again + lint_obj._load() + result = lint_obj.multiqc_config() + assert "'assets/multiqc_config.yml' contains a matching 'report_comment'."
in result["passed"] diff --git a/tests/modules/create.py b/tests/modules/create.py index 7cfba484a1..74e5ec3896 100644 --- a/tests/modules/create.py +++ b/tests/modules/create.py @@ -1,11 +1,22 @@ +import filecmp import os +import shutil +from pathlib import Path +from unittest import mock import pytest import requests_cache import responses +import yaml +from git.repo import Repo import nf_core.modules -from tests.utils import mock_anaconda_api_calls, mock_biocontainers_api_calls +from tests.utils import ( + GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, + GITLAB_URL, + mock_anaconda_api_calls, + mock_biocontainers_api_calls, +) def test_modules_create_succeed(self): @@ -65,3 +76,66 @@ def test_modules_create_nfcore_modules_subtool(self): assert os.path.exists( os.path.join(self.nfcore_modules, "modules", "nf-core", "star", "index", "tests", "main.nf.test") ) + + +@mock.patch("rich.prompt.Confirm.ask") +def test_modules_migrate(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + module_dir = Path(self.nfcore_modules, "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(module_dir / "main.nf", "r") as fh: + old_main_nf = fh.read() + with open(module_dir / "meta.yml", "r") as fh: + old_meta_yml = fh.read() + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = True + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + with open(module_dir / "main.nf", "r") as fh: + new_main_nf = fh.read() + with open(module_dir / "meta.yml", "r") as fh: + new_meta_yml = fh.read() + nextflow_config = module_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() + + +@mock.patch("rich.prompt.Confirm.ask") +def test_modules_migrate_no_delete(self, mock_rich_ask): + """Create a module with the --migrate-pytest option to convert pytest to nf-test. 
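
The migrate tests above drive the new `migrate_pytest` option end to end. Reduced to its core, the programmatic entry point they exercise looks like the sketch below; the repository path is a placeholder, and in real use `Confirm.ask` prompts before the old pytest files are deleted.

```python
import nf_core.modules

# Hypothetical local clone of the nf-core/modules repository
modules_repo = "/path/to/nf-core-modules"

# Re-create the module, porting its old pytest files into the nf-test layout:
# main.nf and meta.yml are preserved, tests/nextflow.config is added, and the
# tests/modules/... pytest directory is removed after confirmation
module_create = nf_core.modules.ModuleCreate(modules_repo, "samtools/sort", migrate_pytest=True)
module_create.create()
```
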
+ Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "modules", "nf-core", "samtools", "sort") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.modules.ModuleCreate(self.nfcore_modules, "samtools/sort", migrate_pytest=True) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "samtools/sort" not in modules_yml.keys() diff --git a/tests/modules/lint.py b/tests/modules/lint.py index a3a7a80b71..a8a775e6f6 100644 --- a/tests/modules/lint.py +++ b/tests/modules/lint.py @@ -2,11 +2,13 @@ import pytest import yaml +from git.repo import Repo import nf_core.modules from nf_core.modules.lint import main_nf +from nf_core.utils import set_wd -from ..utils import GITLAB_URL, set_wd +from ..utils import GITLAB_NFTEST_BRANCH, GITLAB_URL from .patch import BISMARK_ALIGN, CORRECT_SHA, PATCH_BRANCH, REPO_NAME, modify_main_nf @@ -419,3 +421,226 @@ def test_modules_environment_yml_file_name_mismatch(self): assert len(module_lint.passed) > 0 assert len(module_lint.warned) >= 0 assert module_lint.failed[0].lint_test == "environment_yml_name" + + +def test_modules_meta_yml_incorrect_licence_field(self): + """Test linting a module with an incorrect Licence field in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["tools"][0]["bpipe"]["licence"] = "[MIT]" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["tools"][0]["bpipe"]["licence"] = ["MIT"] + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_yml_valid" + + +def test_modules_meta_yml_input_mismatch(self): + """Test linting a module with an extra entry in input fields in meta.yml compared to module.input""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("path bam", "path bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert 
lint_tests.count("meta_input_meta_only") == 1 + assert lint_tests.count("meta_input_main_only") == 1 + + +def test_modules_meta_yml_output_mismatch(self): + """Test linting a module with an extra entry in output fields in meta.yml compared to module.output""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf")) as fh: + main_nf = fh.read() + main_nf_new = main_nf.replace("emit: bam", "emit: bai") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf_new) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "main.nf"), "w") as fh: + fh.write(main_nf) + assert len(module_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) == 2 + lint_tests = [x.lint_test for x in module_lint.warned] + # check that it is there twice: + assert lint_tests.count("meta_output_meta_only") == 1 + assert lint_tests.count("meta_output_main_only") == 1 + + +def test_modules_meta_yml_incorrect_name(self): + """Test linting a module with an incorrect name in meta.yml""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml")) as fh: + meta_yml = yaml.safe_load(fh) + meta_yml["name"] = "bpipe/test" + # need to make the same change to the environment.yml file + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml")) as fh: + environment_yml = yaml.safe_load(fh) + environment_yml["name"] = "bpipe/test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(environment_yml)) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + + # reset changes + meta_yml["name"] = "bpipe_test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "meta.yml"), "w") as fh: + fh.write(yaml.dump(meta_yml)) + environment_yml["name"] = "bpipe_test" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "environment.yml"), "w") as fh: + fh.write(yaml.dump(environment_yml)) + + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "meta_name" + + +def test_modules_missing_test_dir(self): + """Test linting a module with a missing test directory""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == 
"test_dir_exists" + + +def test_modules_missing_test_main_nf(self): + """Test linting a module with a missing test/main.nf file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_nf_exists" + + +def test_modules_missing_required_tag(self): + """Test linting a module with a missing required tag""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "r") as fh: + content = fh.read() + new_content = content.replace("modules_nfcore", "foo") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_main_tags" + + +def test_modules_missing_tags_yml(self): + """Test linting a module with a missing tags.yml file""" + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml.bak") + ) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml.bak").rename( + Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml") + ) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml_exists" + + +def test_modules_incorrect_tags_yml_key(self): + """Test linting a module with an incorrect key in tags.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + content = fh.read() + new_content = content.replace("bpipe/test:", "bpipe_test:") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=True, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert 
len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml" + + +def test_modules_incorrect_tags_yml_values(self): + """Test linting a module with an incorrect path in tags.yml file""" + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "r") as fh: + content = fh.read() + new_content = content.replace("modules/nf-core/bpipe/test/**", "foo") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(new_content) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + with open(Path(self.nfcore_modules, "modules", "nf-core", "bpipe", "test", "tests", "tags.yml"), "w") as fh: + fh.write(content) + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_tags_yml" + + +def test_modules_unused_pytest_files(self): + """Test linting a nf-test module with files still present in `tests/modules/`""" + Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").mkdir(parents=True, exist_ok=True) + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="bpipe/test") + Path(self.nfcore_modules, "tests", "modules", "bpipe", "test").rmdir() + assert len(module_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "test_old_test_dir" + + +def test_nftest_failing_linting(self): + """Test linting a module which includes other modules in nf-test tests. 
+ Linting tests""" + # Clone modules repo with testing modules + tmp_dir = self.nfcore_modules.parent + self.nfcore_modules = Path(tmp_dir, "modules-test") + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_NFTEST_BRANCH) + + module_lint = nf_core.modules.ModuleLint(dir=self.nfcore_modules) + module_lint.lint(print_results=False, module="kallisto/quant") + + assert len(module_lint.failed) == 4, f"Linting failed with {[x.__dict__ for x in module_lint.failed]}" + assert len(module_lint.passed) >= 0 + assert len(module_lint.warned) >= 0 + assert module_lint.failed[0].lint_test == "environment_yml_valid" + assert module_lint.failed[1].lint_test == "meta_yml_valid" + assert module_lint.failed[2].lint_test == "test_main_tags" + assert "kallisto/index" in module_lint.failed[2].message + assert module_lint.failed[3].lint_test == "test_tags_yml" diff --git a/tests/subworkflows/create.py b/tests/subworkflows/create.py index 94c2a66331..fc628df34f 100644 --- a/tests/subworkflows/create.py +++ b/tests/subworkflows/create.py @@ -1,8 +1,15 @@ +import filecmp import os +import shutil +from pathlib import Path +from unittest import mock import pytest +import yaml +from git.repo import Repo import nf_core.subworkflows +from tests.utils import GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH, GITLAB_URL def test_subworkflows_create_succeed(self): @@ -35,3 +42,70 @@ def test_subworkflows_create_nfcore_modules(self): assert os.path.exists( os.path.join(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test") ) + + +@mock.patch("rich.prompt.Confirm.ask") +def test_subworkflows_migrate(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + subworkflow_dir = Path(self.nfcore_modules, "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + with open(subworkflow_dir / "main.nf", "r") as fh: + old_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml", "r") as fh: + old_meta_yml = fh.read() + + # Create a subworkflow with --migrate-pytest + mock_rich_ask.return_value = True + subworkflow_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + subworkflow_create.create() + + with open(subworkflow_dir / "main.nf", "r") as fh: + new_main_nf = fh.read() + with open(subworkflow_dir / "meta.yml", "r") as fh: + new_meta_yml = fh.read() + nextflow_config = subworkflow_dir / "tests" / "nextflow.config" + + # Check that old files have been copied to the new module + assert old_main_nf == new_main_nf + assert old_meta_yml == new_meta_yml + assert nextflow_config.is_file() + + # Check that pytest folder is deleted + assert not pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() + + +@mock.patch("rich.prompt.Confirm.ask") +def test_subworkflows_migrate_no_delete(self, mock_rich_ask): + """Create a subworkflow with the --migrate-pytest option to convert pytest to nf-test. 
+ Test that pytest directory is not deleted.""" + pytest_dir = Path(self.nfcore_modules, "tests", "subworkflows", "nf-core", "bam_stats_samtools") + + # Clone modules repo with pytests + shutil.rmtree(self.nfcore_modules) + Repo.clone_from(GITLAB_URL, self.nfcore_modules, branch=GITLAB_SUBWORKFLOWS_ORG_PATH_BRANCH) + + # Create a module with --migrate-pytest + mock_rich_ask.return_value = False + module_create = nf_core.subworkflows.SubworkflowCreate( + self.nfcore_modules, "bam_stats_samtools", migrate_pytest=True + ) + module_create.create() + + # Check that pytest folder is not deleted + assert pytest_dir.is_dir() + + # Check that pytest_modules.yml is updated + with open(Path(self.nfcore_modules, "tests", "config", "pytest_modules.yml")) as fh: + modules_yml = yaml.safe_load(fh) + assert "subworkflows/bam_stats_samtools" not in modules_yml.keys() diff --git a/tests/subworkflows/lint.py b/tests/subworkflows/lint.py index 5bbe746f2e..1380db2260 100644 --- a/tests/subworkflows/lint.py +++ b/tests/subworkflows/lint.py @@ -28,9 +28,8 @@ def test_subworkflows_lint_new_subworkflow(self): """lint a new subworkflow""" subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=True, all_subworkflows=True) - assert ( - len(subworkflow_lint.failed) == 1 # test snap missing after creating a subworkflow - ), f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" + assert len(subworkflow_lint.failed) == 0 + assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -68,7 +67,6 @@ def test_subworkflows_lint_multiple_remotes(self): def test_subworkflows_lint_snapshot_file(self): """Test linting a subworkflow with a snapshot file""" - Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") assert len(subworkflow_lint.failed) == 0, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" @@ -78,8 +76,10 @@ def test_subworkflows_lint_snapshot_file(self): def test_subworkflows_lint_snapshot_file_missing_fail(self): """Test linting a subworkflow with a snapshot file missing, which should fail""" + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() assert len(subworkflow_lint.failed) == 1, f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 @@ -96,8 +96,11 @@ def test_subworkflows_lint_snapshot_file_not_needed(self): Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test"), "w" ) as fh: fh.write(new_content) + + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").unlink() subworkflow_lint = nf_core.subworkflows.SubworkflowLint(dir=self.nfcore_modules) subworkflow_lint.lint(print_results=False, subworkflow="test_subworkflow") + Path(self.nfcore_modules, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap").touch() assert len(subworkflow_lint.failed) == 0, 
f"Linting failed with {[x.__dict__ for x in subworkflow_lint.failed]}" assert len(subworkflow_lint.passed) > 0 assert len(subworkflow_lint.warned) >= 0 diff --git a/tests/test_download.py b/tests/test_download.py index feb2090b27..7c9532e977 100644 --- a/tests/test_download.py +++ b/tests/test_download.py @@ -264,6 +264,11 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "hello-world", f"{tmp_dir}/hello-world.sif", None, "docker.io", mock_rich_progress ) + # Test successful pull with absolute URI (use tiny 3.5MB test container from the "Kogia" project: https://github.com/bschiffthaler/kogia) + download_obj.singularity_pull_image( + "docker.io/bschiffthaler/sed", f"{tmp_dir}/sed.sif", None, "docker.io", mock_rich_progress + ) + # try to pull from non-existing registry (Name change hello-world_new.sif is needed, otherwise ImageExists is raised before attempting to pull.) with pytest.raises(ContainerError.RegistryNotFound): download_obj.singularity_pull_image( @@ -290,6 +295,16 @@ def test_singularity_pull_image_singularity_installed(self, tmp_dir, mock_rich_p "a-container", f"{tmp_dir}/acontainer.sif", None, "ghcr.io", mock_rich_progress ) + # test Image not found for absolute URI. + with pytest.raises(ContainerError.ImageNotFound): + download_obj.singularity_pull_image( + "docker.io/bschiffthaler/nothingtopullhere", + f"{tmp_dir}/nothingtopullhere.sif", + None, + "docker.io", + mock_rich_progress, + ) + # Traffic from Github Actions to GitHub's Container Registry is unlimited, so no harm should be done here. with pytest.raises(ContainerError.InvalidTag): download_obj.singularity_pull_image( diff --git a/tests/test_lint.py b/tests/test_lint.py index 67104e3ad0..b2e7f3b574 100644 --- a/tests/test_lint.py +++ b/tests/test_lint.py @@ -30,6 +30,7 @@ def setUp(self): "testpipeline", "This is a test pipeline", "Test McTestFace", outdir=self.test_pipeline_dir, plain=True ) self.create_obj.init_pipeline() + # Base lint object on this directory self.lint_obj = nf_core.lint.PipelineLint(self.test_pipeline_dir) @@ -178,44 +179,52 @@ def test_sphinx_md_files(self): ####################### # SPECIFIC LINT TESTS # ####################### - from .lint.actions_awsfulltest import ( + from .lint.actions_awsfulltest import ( # type: ignore[misc] test_actions_awsfulltest_fail, test_actions_awsfulltest_pass, test_actions_awsfulltest_warn, ) - from .lint.actions_awstest import ( + from .lint.actions_awstest import ( # type: ignore[misc] test_actions_awstest_fail, test_actions_awstest_pass, ) - from .lint.actions_ci import ( + from .lint.actions_ci import ( # type: ignore[misc] test_actions_ci_fail_wrong_nf, test_actions_ci_fail_wrong_trigger, test_actions_ci_pass, ) - from .lint.actions_schema_validation import ( + from .lint.actions_schema_validation import ( # type: ignore[misc] test_actions_schema_validation_fails_for_additional_property, test_actions_schema_validation_missing_jobs, test_actions_schema_validation_missing_on, ) - from .lint.files_exist import ( + from .lint.files_exist import ( # type: ignore[misc] test_files_exist_depreciated_file, test_files_exist_missing_config, test_files_exist_missing_main, test_files_exist_pass, ) - from .lint.files_unchanged import ( + from .lint.files_unchanged import ( # type: ignore[misc] test_files_unchanged_fail, test_files_unchanged_pass, ) - from .lint.merge_markers import test_merge_markers_found - from .lint.modules_json import test_modules_json_pass - from .lint.nextflow_config import ( + from .lint.merge_markers import 
test_merge_markers_found # type: ignore[misc] + from .lint.modules_json import test_modules_json_pass # type: ignore[misc] + from .lint.multiqc_config import ( # type: ignore[misc] + test_multiqc_config_exists_ignore, + test_multiqc_config_missing_report_section_order, + test_multiqc_config_report_comment_fail, + test_multiqc_config_report_comment_release_fail, + test_multiqc_config_report_comment_release_succeed, + test_multiqc_incorrect_export_plots, + ) + from .lint.nextflow_config import ( # type: ignore[misc] test_nextflow_config_bad_name_fail, test_nextflow_config_dev_in_release_mode_failed, test_nextflow_config_example_pass, test_nextflow_config_missing_test_profile_failed, ) - from .lint.version_consistency import test_version_consistency + from .lint.version_consistency import test_version_consistency # type: ignore[misc] # TODO nf-core: Assess and strip out if no longer required for DSL2 diff --git a/tests/test_modules.py b/tests/test_modules.py index 39c600b986..92c8dfda3f 100644 --- a/tests/test_modules.py +++ b/tests/test_modules.py @@ -3,12 +3,12 @@ import os import shutil -import tempfile import unittest from pathlib import Path import requests_cache import responses +import yaml import nf_core.create import nf_core.modules @@ -45,16 +45,34 @@ def create_modules_repo_dummy(tmp_dir): module_create.create() # Remove doi from meta.yml which makes lint fail - meta_yml = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") - Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap").touch() - with open(meta_yml, "r") as fh: - lines = fh.readlines() - for line_index in range(len(lines)): - if "doi" in lines[line_index]: - to_pop = line_index - lines.pop(to_pop) - with open(meta_yml, "w") as fh: - fh.writelines(lines) + meta_yml_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "meta.yml") + + with open(meta_yml_path, "r") as fh: + meta_yml = yaml.safe_load(fh) + del meta_yml["tools"][0]["bpipe"]["doi"] + with open(meta_yml_path, "w") as fh: + yaml.dump(meta_yml, fh) + # Add dummy content to main.nf.test.snap + test_snap_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test.snap") + test_snap_path.touch() + with open(test_snap_path, "w") as fh: + fh.write('{\n "my test": {}\n}') + + # remove "TODO" statements from main.nf + main_nf_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "main.nf") + with open(main_nf_path, "r") as fh: + main_nf = fh.read() + main_nf = main_nf.replace("TODO", "") + with open(main_nf_path, "w") as fh: + fh.write(main_nf) + + # remove "TODO" statements from main.nf.test + main_nf_test_path = Path(root_dir, "modules", "nf-core", "bpipe", "test", "tests", "main.nf.test") + with open(main_nf_test_path, "r") as fh: + main_nf_test = fh.read() + main_nf_test = main_nf_test.replace("TODO", "") + with open(main_nf_test_path, "w") as fh: + fh.write(main_nf_test) return root_dir @@ -139,6 +157,8 @@ def test_modulesrepo_class(self): test_modules_create_nfcore_modules, test_modules_create_nfcore_modules_subtool, test_modules_create_succeed, + test_modules_migrate, + test_modules_migrate_no_delete, ) from .modules.info import ( # type: ignore[misc] test_modules_info_in_modules_repo, @@ -164,6 +184,8 @@ def test_modulesrepo_class(self): test_modules_environment_yml_file_not_array, test_modules_environment_yml_file_sorted_correctly, test_modules_environment_yml_file_sorted_incorrectly, + test_modules_incorrect_tags_yml_key, + test_modules_incorrect_tags_yml_values, 
test_modules_lint_check_process_labels, test_modules_lint_check_url, test_modules_lint_empty, @@ -176,6 +198,16 @@ def test_modulesrepo_class(self): test_modules_lint_snapshot_file_missing_fail, test_modules_lint_snapshot_file_not_needed, test_modules_lint_trimgalore, + test_modules_meta_yml_incorrect_licence_field, + test_modules_meta_yml_incorrect_name, + test_modules_meta_yml_input_mismatch, + test_modules_meta_yml_output_mismatch, + test_modules_missing_required_tag, + test_modules_missing_tags_yml, + test_modules_missing_test_dir, + test_modules_missing_test_main_nf, + test_modules_unused_pytest_files, + test_nftest_failing_linting, ) from .modules.list import ( # type: ignore[misc] test_modules_install_and_list_pipeline, diff --git a/tests/test_subworkflows.py b/tests/test_subworkflows.py index 33cad81e3d..19872ee168 100644 --- a/tests/test_subworkflows.py +++ b/tests/test_subworkflows.py @@ -4,6 +4,7 @@ import os import shutil import unittest +from pathlib import Path import nf_core.create import nf_core.modules @@ -21,22 +22,23 @@ def create_modules_repo_dummy(tmp_dir): """Create a dummy copy of the nf-core/modules repo""" - root_dir = os.path.join(tmp_dir, "modules") - os.makedirs(os.path.join(root_dir, "modules")) - os.makedirs(os.path.join(root_dir, "subworkflows")) - os.makedirs(os.path.join(root_dir, "subworkflows", "nf-core")) - os.makedirs(os.path.join(root_dir, "tests", "modules")) - os.makedirs(os.path.join(root_dir, "tests", "subworkflows")) - os.makedirs(os.path.join(root_dir, "tests", "config")) - with open(os.path.join(root_dir, "tests", "config", "pytest_modules.yml"), "w") as fh: - fh.writelines(["test:", "\n - modules/test/**", "\n - tests/modules/test/**"]) - with open(os.path.join(root_dir, ".nf-core.yml"), "w") as fh: + root_dir = Path(tmp_dir, "modules") + Path(root_dir, "modules").mkdir(parents=True, exist_ok=True) + Path(root_dir, "subworkflows").mkdir(parents=True, exist_ok=True) + Path(root_dir, "subworkflows", "nf-core").mkdir(parents=True, exist_ok=True) + Path(root_dir, "tests", "config").mkdir(parents=True, exist_ok=True) + with open(Path(root_dir, ".nf-core.yml"), "w") as fh: fh.writelines(["repository_type: modules", "\n", "org_path: nf-core", "\n"]) - # TODO Add a mock here subworkflow_create = nf_core.subworkflows.SubworkflowCreate(root_dir, "test_subworkflow", "@author", True) subworkflow_create.create() + # Add dummy content to main.nf.test.snap + test_snap_path = Path(root_dir, "subworkflows", "nf-core", "test_subworkflow", "tests", "main.nf.test.snap") + test_snap_path.touch() + with open(test_snap_path, "w") as fh: + fh.write('{\n "my test": {}\n}') + return root_dir @@ -95,6 +97,8 @@ def tearDown(self): test_subworkflows_create_fail_exists, test_subworkflows_create_nfcore_modules, test_subworkflows_create_succeed, + test_subworkflows_migrate, + test_subworkflows_migrate_no_delete, ) from .subworkflows.info import ( # type: ignore[misc] test_subworkflows_info_in_modules_repo, diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index c4e3d49ae0..154a31fca6 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -3,7 +3,7 @@ import pytest -from .utils import set_wd, with_temporary_file, with_temporary_folder +from .utils import with_temporary_file, with_temporary_folder def test_with_temporary_file(): @@ -30,20 +30,3 @@ def tmp_folder_exists(tmp_folder): def test_tmp_folder_does_not_exist_after(): tmp_folder = with_temporary_folder(lambda x: x)() assert not Path(tmp_folder).exists() - - -def test_set_wd(): - with 
tempfile.TemporaryDirectory() as tmpdirname: - with set_wd(tmpdirname): - context_wd = Path().resolve() - assert context_wd == Path(tmpdirname).resolve() - assert context_wd != Path().resolve() - - -def test_set_wd_revert_on_raise(): - wd_before_context = Path().resolve() - with tempfile.TemporaryDirectory() as tmpdirname: - with pytest.raises(Exception): - with set_wd(tmpdirname): - raise Exception - assert wd_before_context == Path().resolve() diff --git a/tests/test_utils.py b/tests/test_utils.py index 2ab5b64bfc..90d1886dbd 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -175,13 +175,13 @@ def test_get_repo_releases_branches_nf_core(self): def test_get_repo_releases_branches_not_nf_core(self): wfs = nf_core.list.Workflows() wfs.get_remote_workflows() - pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("ewels/MultiQC", wfs) + pipeline, wf_releases, wf_branches = nf_core.utils.get_repo_releases_branches("MultiQC/MultiQC", wfs) for r in wf_releases: if r.get("tag_name") == "v1.10": break else: raise AssertionError("MultiQC release v1.10 not found") - assert "master" in wf_branches.keys() + assert "main" in wf_branches.keys() def test_get_repo_releases_branches_not_exists(self): wfs = nf_core.list.Workflows() @@ -207,3 +207,34 @@ def test_validate_file_md5(): nf_core.utils.validate_file_md5(test_file, different_md5) with pytest.raises(ValueError): nf_core.utils.validate_file_md5(test_file, non_hex_string) + + +def test_nested_setitem(): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_setitem(d, ["a", "b", "c"], "value new") + assert d["a"]["b"]["c"] == "value new" + assert d == {"a": {"b": {"c": "value new"}}} + + +def test_nested_delitem(): + d = {"a": {"b": {"c": "value"}}} + nf_core.utils.nested_delitem(d, ["a", "b", "c"]) + assert "c" not in d["a"]["b"] + assert d == {"a": {"b": {}}} + + +def test_set_wd(): + with tempfile.TemporaryDirectory() as tmpdirname: + with nf_core.utils.set_wd(tmpdirname): + context_wd = Path().resolve() + assert context_wd == Path(tmpdirname).resolve() + assert context_wd != Path().resolve() + + +def test_set_wd_revert_on_raise(): + wd_before_context = Path().resolve() + with tempfile.TemporaryDirectory() as tmpdirname: + with pytest.raises(Exception): + with nf_core.utils.set_wd(tmpdirname): + raise Exception + assert wd_before_context == Path().resolve() diff --git a/tests/utils.py b/tests/utils.py index 307129b5b2..198ac3d583 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -59,24 +59,6 @@ def wrapper(*args: Any, **kwargs: Any) -> Any: return wrapper -@contextmanager -def set_wd(path: Path) -> Generator[None, None, None]: - """Sets the working directory for this context. - - Arguments - --------- - - path : Path - Path to the working directory to be used iside this context. - """ - start_wd = Path().absolute() - os.chdir(Path(path).resolve()) - try: - yield - finally: - os.chdir(start_wd) - - def mock_anaconda_api_calls(rsps: responses.RequestsMock, module: str, version: str) -> None: """Mock anaconda api calls for module""" anaconda_api_url = f"https://api.anaconda.org/package/bioconda/{module}"
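
The `mock_anaconda_api_calls()` helper above (shown truncated here) is used together with its `mock_biocontainers_api_calls()` counterpart, imported in `tests/modules/create.py` earlier in this diff, to stub out the HTTP lookups that module creation performs. A sketch of the calling pattern, with an illustrative tool name and version:

```python
import responses

from tests.utils import mock_anaconda_api_calls, mock_biocontainers_api_calls

with responses.RequestsMock() as rsps:
    # Register canned bioconda/biocontainers responses; any request that does
    # not match a registered URL raises a ConnectionError, so the test can
    # never hit the real APIs. The name/version pair is illustrative only.
    mock_anaconda_api_calls(rsps, "bpipe", "0.9.11")
    mock_biocontainers_api_calls(rsps, "bpipe", "0.9.11")
    # ... run the code under test here, e.g. nf_core.modules.ModuleCreate(...).create()
```
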