diff --git a/.github/workflows/awsfulltest.yml b/.github/workflows/awsfulltest.yml
deleted file mode 100644
index 128622b40a..0000000000
--- a/.github/workflows/awsfulltest.yml
+++ /dev/null
@@ -1,37 +0,0 @@
-name: nf-core AWS full size tests
-# This workflow is triggered on published releases.
-# It can be additionally triggered manually with GitHub actions workflow dispatch button.
-# It runs the -profile 'test_full' on AWS batch
-
-on:
-  release:
-    types: [published]
-  workflow_dispatch:
-jobs:
-  run-tower:
-    name: Run AWS full tests
-    if: github.repository == 'nf-core/sarek'
-    runs-on: ubuntu-latest
-    steps:
-      # Launch workflow using Tower CLI tool action
-      - name: Launch workflow via tower
-        uses: seqeralabs/action-tower-launch@v2
-        with:
-          workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
-          access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
-          compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
-          revision: ${{ github.sha }}
-          workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/somatic_test
-          parameters: |
-            {
-              "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
-              "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/somatic_test"
-            }
-          profiles: test_full
-
-      - uses: actions/upload-artifact@v3
-        with:
-          name: Tower debug log file
-          path: |
-            tower_action_*.log
-            tower_action_*.json
diff --git a/.github/workflows/awsfulltest_germline.yml b/.github/workflows/awsfulltest_germline.yml
deleted file mode 100644
index f8dfc2cbc9..0000000000
--- a/.github/workflows/awsfulltest_germline.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-name: nf-core AWS full size tests germline
-# This workflow is triggered on published releases.
-# It can be additionally triggered manually with GitHub actions workflow dispatch button.
-# It runs the -profile 'test_full' on AWS batch
-
-on:
-  release:
-    types: [published]
-  workflow_dispatch:
-jobs:
-  run-tower:
-    name: Run AWS full tests
-    if: github.repository == 'nf-core/sarek'
-    runs-on: ubuntu-latest
-    steps:
-      # Launch workflow using Tower CLI tool action
-      - name: Launch workflow via tower
-        uses: seqeralabs/action-tower-launch@v2
-        with:
-          workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
-          access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
-          compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
-          revision: ${{ github.sha }}
-          workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/germline_test
-          parameters: |
-            {
-              "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
-              "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/germline_test"
-            }
-          profiles: test_full_germline
-      - uses: actions/upload-artifact@v3
-        with:
-          name: Tower debug log file
-          path: tower_action_*.log
diff --git a/.github/workflows/awstest.yml b/.github/workflows/awstest.yml
index dea448ae99..e687dc5056 100644
--- a/.github/workflows/awstest.yml
+++ b/.github/workflows/awstest.yml
@@ -3,11 +3,27 @@ name: nf-core AWS test
 # It runs the -profile 'test' on AWS batch
 
 on:
+  release:
+    types: [created]
   workflow_dispatch:
+    inputs:
+      profiletest:
+        description: "Trigger profile tests (smaller) on AWS"
+        type: boolean
+        default: true
+      somatic:
+        description: "Trigger somatic full test on AWS"
+        type: boolean
+        default: false
+      germline:
+        description: "Trigger germline full test on AWS"
+        type: boolean
+        default: false
+
 jobs:
-  run-tower:
+  trigger-profile-test:
     name: Run AWS tests
-    if: github.repository == 'nf-core/sarek'
+    if: ( github.repository == 'nf-core/sarek' ) && ( github.event_name != 'workflow_dispatch' || inputs.profiletest )
     runs-on: ubuntu-latest
     steps:
       # Launch workflow using Tower CLI tool action
@@ -26,7 +42,60 @@ jobs:
           profiles: test
       - uses: actions/upload-artifact@v3
         with:
-          name: Tower debug log file
+          name: tower-profiletest-log
           path: |
             tower_action_*.log
             tower_action_*.json
+
+  trigger-full-test-somatic:
+    name: Run AWS full tests
+    if: ${{ ( github.repository == 'nf-core/sarek' ) && ( github.event_name != 'workflow_dispatch' || inputs.somatic ) }}
+    runs-on: ubuntu-latest
+    steps:
+      # Launch workflow using Tower CLI tool action
+      - name: Launch workflow via tower
+        uses: seqeralabs/action-tower-launch@v2
+        with:
+          workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
+          access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
+          compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
+          revision: ${{ github.sha }}
+          workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/somatic_test
+          parameters: |
+            {
+              "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
+              "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/somatic_test"
+            }
+          profiles: test_full
+
+      - uses: actions/upload-artifact@v3
+        with:
+          name: tower-full-somatic-log
+          path: |
+            tower_action_*.log
+            tower_action_*.json
+
+  trigger-full-test-germline:
+    name: Run AWS full tests
+    if: ${{ ( github.repository == 'nf-core/sarek' ) && ( github.event_name != 'workflow_dispatch' || inputs.germline ) }}
+    runs-on: ubuntu-latest
+    steps:
+      # Launch workflow using Tower CLI tool action
+      - name: Launch workflow via tower
+        uses: seqeralabs/action-tower-launch@v2
+        with:
+          workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
+          access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
+          compute_env: ${{ secrets.TOWER_COMPUTE_ENV }}
+          revision: ${{ github.sha }}
+          workdir: s3://${{ secrets.AWS_S3_BUCKET }}/work/sarek/work-${{ github.sha }}/germline_test
+          parameters: |
+            {
+              "hook_url": "${{ secrets.MEGATESTS_ALERTS_SLACK_HOOK_URL }}",
+              "outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/sarek/results-${{ github.sha }}/germline_test"
+            }
+          profiles: test_full_germline
+      - uses: actions/upload-artifact@v3
+        with:
+          name: tower-full-germline-log
+          path: tower_action_*.log
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 3ba7eb31f6..841a37d6de 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,11 +1,18 @@
-name: nf-core CI
+name: test
 # This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
 on:
   push:
-    branches: [dev]
+    branches:
+      - dev
   pull_request:
   release:
     types: [published]
+  merge_group:
+    types:
+      - checks_requested
+    branches:
+      - master
+      - dev
 
 # Cancel if a newer run is started
 concurrency:
@@ -13,25 +20,45 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
+  changes:
+    name: Check for changes
+    runs-on: ubuntu-latest
+    outputs:
+      # Expose matched filters as job 'tags' output variable
+      tags: ${{ steps.filter.outputs.changes }}
+    steps:
+      - uses: actions/checkout@v3
+      - uses: dorny/paths-filter@v2
+        id: filter
+        with:
+          filters: "tests/config/tags.yml"
+
   test:
-    name: Run pipeline with test data
-    # Only run on push if this is the nf-core dev branch (merged PRs)
-    if: "${{ github.event_name != 'push' || (github.event_name == 'push' && github.repository == 'nf-core/sarek') }}"
+    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
     runs-on: ubuntu-latest
+    needs: changes
+    if: needs.changes.outputs.tags != '[]'
    strategy:
-      # HACK Remove after DSL2 rewrite is done
       fail-fast: false
       matrix:
+        tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
+        profile: ["docker", "singularity"]
+        TEST_DATA_BASE:
+          - "test-datasets/data"
         NXF_VER:
           - "23.04.0"
           - "latest-everything"
-        test:
-          - "default"
-        profile: ["docker"]
-        # profile: ["docker", "singularity", "conda"]
+        exclude:
+          - profile: "singularity"
+            tags: concatenate_vcfs
+          - profile: "singularity"
+            tags: merge
+          - profile: "singularity"
+            tags: validation_checks
     env:
       NXF_ANSI_LOG: false
       TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
+      SENTIEON_LICENSE_BASE64: ${{ secrets.SENTIEON_LICENSE_BASE64 }}
     steps:
       - name: Check out pipeline code
         uses: actions/checkout@v3
@@ -41,44 +68,16 @@ jobs:
         run: |
           echo "digest=$(echo sarek3_${{ github.workspace }} | md5sum | cut -c 1-25)" >> $GITHUB_OUTPUT
 
-      - name: Cache test data
-        id: cache-testdata
-        uses: actions/cache@v3
-        with:
-          path: test-datasets/
-          key: ${{ steps.hash_workspace.outputs.digest }}
-
-      - name: Check out test data
-        if: steps.cache-testdata.outputs.cache-hit != 'true'
-        uses: actions/checkout@v3
-        with:
-          repository: nf-core/test-datasets
-          ref: sarek3
-          path: test-datasets/
-
-      - name: Replace remote paths in samplesheets
-        run: |
-          for f in tests/csv/3.0/*csv; do
-            sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
-            echo "========== $f ============"
-            cat $f
-            echo "========================================"
-          done;
-
       - name: Set up Python
         uses: actions/setup-python@v4
         with:
           python-version: "3.x"
-
-      - uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
+          cache: "pip"
+          cache-dependency-path: |
+            **/requirements.txt
 
       - name: Install Python dependencies
-        run: python -m pip install --upgrade pip pytest-workflow
+        run: pip install --upgrade -r tests/requirements.txt
 
       - name: Install Nextflow ${{ matrix.NXF_VER }}
         uses: nf-core/setup-nextflow@v1
@@ -99,14 +98,50 @@
           channels: conda-forge,bioconda,defaults
           python-version: ${{ matrix.python-version }}
 
+      - name: Cache test data
+        id: cache-testdata
+        uses: actions/cache@v3
+        with:
+          path: test-datasets/
+          key: ${{ steps.hash_workspace.outputs.digest }}
+
+      - name: Check out test data
+        if: steps.cache-testdata.outputs.cache-hit != 'true'
+        uses: actions/checkout@v3
+        with:
+          repository: nf-core/test-datasets
+          ref: sarek3
+          path: test-datasets/
+
+      - name: Replace remote paths in samplesheets
+        run: |
+          for f in tests/csv/3.0/*csv; do
+            sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
+            echo "========== $f ============"
+            cat $f
+            echo "========================================"
+          done;
+
+      # Set up secrets
+      - name: Set up nextflow secrets
+        if: env.SENTIEON_LICENSE_BASE64 != null
+        run: |
+          nextflow secrets set SENTIEON_LICENSE_BASE64 ${{ secrets.SENTIEON_LICENSE_BASE64 }}
+          nextflow secrets set SENTIEON_AUTH_MECH_BASE64 ${{ secrets.SENTIEON_AUTH_MECH_BASE64 }}
+          SENTIEON_ENCRYPTION_KEY=$(echo -n "${{ secrets.ENCRYPTION_KEY_BASE64 }}" | base64 -d)
+          SENTIEON_LICENSE_MESSAGE=$(echo -n "${{ secrets.LICENSE_MESSAGE_BASE64 }}" | base64 -d)
+          SENTIEON_AUTH_DATA=$(python bin/license_message.py encrypt --key "$SENTIEON_ENCRYPTION_KEY" --message "$SENTIEON_LICENSE_MESSAGE")
+          SENTIEON_AUTH_DATA_BASE64=$(echo -n "$SENTIEON_AUTH_DATA" | base64 -w 0)
+          nextflow secrets set SENTIEON_AUTH_DATA_BASE64 $SENTIEON_AUTH_DATA_BASE64
+
       - name: Conda clean
         if: matrix.profile == 'conda'
         run: conda clean -a
 
-      - name: Run pipeline with tests settings
+      - name: Run pytest-workflow
         uses: Wandalen/wretry.action@v1
         with:
-          command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.test }} --symlink --kwdof --git-aware --color=yes
+          command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes
           attempt_limit: 3
 
       - name: Output log on failure
@@ -127,3 +162,22 @@
             /home/runner/pytest_workflow_*/*/work
             !/home/runner/pytest_workflow_*/*/work/conda
             !/home/runner/pytest_workflow_*/*/work/singularity
+
+  confirm-pass:
+    runs-on: ubuntu-latest
+    needs:
+      - test
+    if: always()
+    steps:
+      - name: All tests ok
+        if: ${{ success() || !contains(needs.*.result, 'failure') }}
+        run: exit 0
+      - name: One or more tests failed
+        if: ${{ contains(needs.*.result, 'failure') }}
+        run: exit 1
+
+      - name: debug-print
+        if: always()
+        run: |
+          echo "toJSON(needs) = ${{ toJSON(needs) }}"
+          echo "toJSON(needs.*.result) = ${{ toJSON(needs.*.result) }}"
diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml
deleted file mode 100644
index 866704e9c4..0000000000
--- a/.github/workflows/pytest-workflow.yml
+++ /dev/null
@@ -1,177 +0,0 @@
-name: pytest-workflow
-# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
-on:
-  pull_request:
-    branches: [dev]
-
-# Cancel if a newer run is started
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  changes:
-    name: Check for changes
-    runs-on: ubuntu-latest
-    outputs:
-      # Expose matched filters as job 'tags' output variable
-      tags: ${{ steps.filter.outputs.changes }}
-    steps:
-      - uses: actions/checkout@v3
-      - uses: dorny/paths-filter@v2
-        id: filter
-        with:
-          filters: "tests/config/tags.yml"
-
-  test:
-    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
-    runs-on: ubuntu-latest
-    needs: changes
-    if: needs.changes.outputs.tags != '[]'
-    strategy:
-      fail-fast: false
-      matrix:
-        tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
-        profile: ["docker"]
-        # profile: ["docker", "singularity", "conda"]
-        TEST_DATA_BASE:
-          - "test-datasets/data"
-        NXF_VER:
-          - "23.04.0"
-          - "latest-everything"
-        exclude:
-          # - profile: "conda"
-          #   tags: concatenate_vcfs
-          # - profile: "conda"
-          #   tags: deepvariant
-          # - profile: "conda"
-          #   tags: haplotypecaller
-          # - profile: "conda"
-          #   tags: merge
-          # - profile: "conda"
-          #   tags: snpeff
-          # - profile: "conda"
-          #   tags: umi
-          # - profile: "conda"
-          #   tags: validation_checks
-          # - profile: "conda"
-          #   tags: vep
-          # - profile: "conda"
-          #   tags: sentieon/bwamem
-          - profile: "singularity"
-            tags: concatenate_vcfs
-          - profile: "singularity"
-            tags: merge
-          - profile: "singularity"
-            tags: validation_checks
-    env:
-      NXF_ANSI_LOG: false
-      TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
-      SENTIEON_LICENSE_BASE64: ${{ secrets.SENTIEON_LICENSE_BASE64 }}
-    steps:
-      - name: Check out pipeline code
-        uses: actions/checkout@v3
-
-      - name: Hash Github Workspace
-        id: hash_workspace
-        run: |
-          echo "digest=$(echo sarek3_${{ github.workspace }} | md5sum | cut -c 1-25)" >> $GITHUB_OUTPUT
-
-      - name: Cache test data
-        id: cache-testdata
-        uses: actions/cache@v3
-        with:
-          path: test-datasets/
-          key: ${{ steps.hash_workspace.outputs.digest }}
-
-      - name: Check out test data
-        if: steps.cache-testdata.outputs.cache-hit != 'true'
-        uses: actions/checkout@v3
-        with:
-          repository: nf-core/test-datasets
-          ref: sarek3
-          path: test-datasets/
-
-      - name: Replace remote paths in samplesheets
-        run: |
-          for f in tests/csv/3.0/*csv; do
-            sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
-            echo "========== $f ============"
-            cat $f
-            echo "========================================"
-          done;
-
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.x"
-
-      - uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
-
-      - name: Install Python dependencies
-        run: python -m pip install --upgrade pip pytest-workflow cryptography
-
-      - name: Install Nextflow ${{ matrix.NXF_VER }}
-        uses: nf-core/setup-nextflow@v1
-        with:
-          version: "${{ matrix.NXF_VER }}"
-
-      - name: Set up Singularity
-        if: matrix.profile == 'singularity'
-        uses: eWaterCycle/setup-singularity@v5
-        with:
-          singularity-version: 3.7.1
-
-      - name: Set up miniconda
-        if: matrix.profile == 'conda'
-        uses: conda-incubator/setup-miniconda@v2
-        with:
-          auto-update-conda: true
-          channels: conda-forge,bioconda,defaults
-          python-version: ${{ matrix.python-version }}
-
-      # Set up secrets
-      - name: Set up nextflow secrets
-        if: env.SENTIEON_LICENSE_BASE64 != null
-        run: |
-          nextflow secrets set SENTIEON_LICENSE_BASE64 ${{ secrets.SENTIEON_LICENSE_BASE64 }}
-          nextflow secrets set SENTIEON_AUTH_MECH_BASE64 ${{ secrets.SENTIEON_AUTH_MECH_BASE64 }}
-          SENTIEON_ENCRYPTION_KEY=$(echo -n "${{ secrets.ENCRYPTION_KEY_BASE64 }}" | base64 -d)
-          SENTIEON_LICENSE_MESSAGE=$(echo -n "${{ secrets.LICENSE_MESSAGE_BASE64 }}" | base64 -d)
-          SENTIEON_AUTH_DATA=$(python bin/license_message.py encrypt --key "$SENTIEON_ENCRYPTION_KEY" --message "$SENTIEON_LICENSE_MESSAGE")
-          SENTIEON_AUTH_DATA_BASE64=$(echo -n "$SENTIEON_AUTH_DATA" | base64 -w 0)
-          nextflow secrets set SENTIEON_AUTH_DATA_BASE64 $SENTIEON_AUTH_DATA_BASE64
-
-      - name: Conda clean
-        if: matrix.profile == 'conda'
-        run: conda clean -a
-
-      - name: Run pytest-workflow
-        uses: Wandalen/wretry.action@v1
-        with:
-          command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes
-          attempt_limit: 3
-
-      - name: Output log on failure
-        if: failure()
-        run: |
-          sudo apt install bat > /dev/null
-          batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err}
-
-      - name: Upload logs on failure
-        if: failure()
-        uses: actions/upload-artifact@v2
-        with:
-          name: logs-${{ matrix.profile }}
-          path: |
-            /home/runner/pytest_workflow_*/*/.nextflow.log
-            /home/runner/pytest_workflow_*/*/log.out
-            /home/runner/pytest_workflow_*/*/log.err
-            /home/runner/pytest_workflow_*/*/work
-            !/home/runner/pytest_workflow_*/*/work/conda
-            !/home/runner/pytest_workflow_*/*/work/singularity
diff --git a/.github/workflows/pytest-workflow_release.yml b/.github/workflows/pytest-workflow_release.yml
deleted file mode 100644
index f133d50d68..0000000000
--- a/.github/workflows/pytest-workflow_release.yml
+++ /dev/null
@@ -1,147 +0,0 @@
-name: pytest-workflow-release
-# This workflow runs the pipeline with the minimal test dataset to check that it completes without any syntax errors
-on:
-  pull_request:
-    branches: [master]
-  release:
-    types: [published]
-
-# Cancel if a newer run is started
-concurrency:
-  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  changes:
-    name: Check for changes
-    runs-on: ubuntu-latest
-    outputs:
-      # Expose matched filters as job 'tags' output variable
-      tags: ${{ steps.filter.outputs.changes }}
-    steps:
-      - uses: actions/checkout@v3
-      - uses: dorny/paths-filter@v2
-        id: filter
-        with:
-          filters: "tests/config/tags.yml"
-
-  test:
-    name: ${{ matrix.tags }} ${{ matrix.profile }} NF ${{ matrix.NXF_VER }}
-    runs-on: ubuntu-latest
-    needs: changes
-    if: needs.changes.outputs.tags != '[]'
-    strategy:
-      fail-fast: false
-      matrix:
-        tags: ["${{ fromJson(needs.changes.outputs.tags) }}"]
-        profile: ["docker", "singularity"]
-        TEST_DATA_BASE:
-          - "test-datasets/data"
-        NXF_VER:
-          - "23.04.0"
-          - "latest-everything"
-        exclude:
-          - profile: "singularity"
-            tags: concatenate_vcfs
-          - profile: "singularity"
-            tags: merge
-          - profile: "singularity"
-            tags: validation_checks
-    env:
-      NXF_ANSI_LOG: false
-      TEST_DATA_BASE: "${{ github.workspace }}/test-datasets"
-    steps:
-      - name: Check out pipeline code
-        uses: actions/checkout@v3
-
-      - name: Hash Github Workspace
-        id: hash_workspace
-        run: |
-          echo "digest=$(echo sarek3_${{ github.workspace }} | md5sum | cut -c 1-25)" >> $GITHUB_OUTPUT
-
-      - name: Cache test data
-        id: cache-testdata
-        uses: actions/cache@v3
-        with:
-          path: test-datasets/
-          key: ${{ steps.hash_workspace.outputs.digest }}
-
-      - name: Check out test data
-        if: steps.cache-testdata.outputs.cache-hit != 'true'
-        uses: actions/checkout@v3
-        with:
-          repository: nf-core/test-datasets
-          ref: sarek3
-          path: test-datasets/
-
-      - name: Replace remote paths in samplesheets
-        run: |
-          for f in tests/csv/3.0/*csv; do
-            sed -i "s=https://raw.githubusercontent.com/nf-core/test-datasets/modules/=${{ github.workspace }}/test-datasets/=g" $f
-            echo "========== $f ============"
-            cat $f
-            echo "========================================"
-          done;
-
-      - name: Set up Python
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.x"
-
-      - uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
-
-      - name: Install Python dependencies
-        run: python -m pip install --upgrade pip pytest-workflow
-
-      - name: Install Nextflow ${{ matrix.NXF_VER }}
-        uses: nf-core/setup-nextflow@v1
-        with:
-          version: "${{ matrix.NXF_VER }}"
-
-      - name: Set up Singularity
-        if: matrix.profile == 'singularity'
-        uses: eWaterCycle/setup-singularity@v5
-        with:
-          singularity-version: 3.7.1
-
-      - name: Set up miniconda
-        if: matrix.profile == 'conda'
-        uses: conda-incubator/setup-miniconda@v2
-        with:
-          auto-update-conda: true
-          channels: conda-forge,bioconda,defaults
-          python-version: ${{ matrix.python-version }}
-
-      - name: Conda clean
-        if: matrix.profile == 'conda'
-        run: conda clean -a
-
-      - name: Run pytest-workflow
-        uses: Wandalen/wretry.action@v1
-        with:
-          command: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes
-          attempt_limit: 3
-
-      - name: Output log on failure
-        if: failure()
-        run: |
-          sudo apt install bat > /dev/null
-          batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err}
-
-      - name: Upload logs on failure
-        if: failure()
-        uses: actions/upload-artifact@v2
-        with:
-          name: logs-${{ matrix.profile }}
-          path: |
-            /home/runner/pytest_workflow_*/*/.nextflow.log
-            /home/runner/pytest_workflow_*/*/log.out
-            /home/runner/pytest_workflow_*/*/log.err
-            /home/runner/pytest_workflow_*/*/work
-            !/home/runner/pytest_workflow_*/*/work/conda
-            !/home/runner/pytest_workflow_*/*/work/singularity
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d084451742..9bc381a380 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -33,6 +33,7 @@ Rapaselet is a delta formed by the Rapaätno river between the Bielloriehppe mas
 - [#1179](https://github.com/nf-core/sarek/pull/1179) - Unhide params `trim_fastq`, `umi_read_structure`, and `aligner`
 - [#1180](https://github.com/nf-core/sarek/pull/1180) - Updating the nf-core modules
 - [#1198](https://github.com/nf-core/sarek/pull/1198) - Prepare release `3.3.0`
+- [#1200](https://github.com/nf-core/sarek/pull/1200) - Streamline Github Actions workflows
 
 ### Fixed
 
diff --git a/tests/config/tags.yml b/tests/config/tags.yml
index 017246e117..c8e001a193 100644
--- a/tests/config/tags.yml
+++ b/tests/config/tags.yml
@@ -1,3 +1,7 @@
+# default
+default:
+  - "**"
+
 # default_extended
 
 tumor_normal_pair:
@@ -107,21 +111,21 @@ bwamem2:
   - conf/modules/aligner.config
   - modules/nf-core/bwamem2/mem/main.nf
   - subworkflows/local/fastq_align_bwamem_mem2_dragmap_sentieon/main.nf
-  - tests/test_alignment_bwamem2.yml
+  - tests/test_aligner_bwamem2.yml
 
 ### dragmap
 dragmap:
   - conf/modules/aligner.config
   - modules/nf-core/dragmap/align/main.nf
   - subworkflows/local/fastq_align_bwamem_mem2_dragmap_sentieon/main.nf
-  - tests/test_alignment_dragmap.yml
+  - tests/test_aligner_dragmap.yml
 
 ### sentieon/bwamem
 sentieon/bwamem:
   - conf/modules/aligner.config
   - modules/nf-core/sentieon/bwamem/main.nf
   - subworkflows/local/fastq_align_bwamem_mem2_dragmap_sentieon/main.nf
-  - tests/test_alignment_sentieon_bwamem.yml
+  - tests/test_aligner_sentieon_bwamem.yml
 
 ## markduplicates
 gatk4/markduplicates:
diff --git a/tests/requirements.txt b/tests/requirements.txt
new file mode 100644
index 0000000000..095a66001f
--- /dev/null
+++ b/tests/requirements.txt
@@ -0,0 +1,3 @@
+pip
+pytest-workflow
+cryptography