From 21ecd68bfe415ee78173813667f1bb0d465f7f4b Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 27 Jan 2022 08:43:02 +0100 Subject: [PATCH 001/283] fix: remove left-over unnecessary code --- modules/deeparg/predict/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/deeparg/predict/main.nf b/modules/deeparg/predict/main.nf index 1af0fd40..9408fa3d 100644 --- a/modules/deeparg/predict/main.nf +++ b/modules/deeparg/predict/main.nf @@ -11,7 +11,7 @@ process DEEPARG_PREDICT { input: tuple val(meta), path(fasta), val(model) - tuple path(db) + path(db) output: tuple val(meta), path("*.align.daa") , emit: daa From 8a20253f4028133b589c2c169aaa68a5a7fe848d Mon Sep 17 00:00:00 2001 From: Sateesh <33637490+sateeshperi@users.noreply.github.com> Date: Fri, 25 Feb 2022 13:06:39 -0500 Subject: [PATCH 002/283] update args & convert to bam (#1355) --- modules/picard/cleansam/main.nf | 12 +++++------- modules/picard/cleansam/meta.yml | 11 +++++------ tests/modules/picard/cleansam/test.yml | 4 ++-- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/modules/picard/cleansam/main.nf b/modules/picard/cleansam/main.nf index 2eb171d5..fb435911 100644 --- a/modules/picard/cleansam/main.nf +++ b/modules/picard/cleansam/main.nf @@ -1,6 +1,6 @@ process PICARD_CLEANSAM { tag "$meta.id" - label 'process_low' + label 'process_medium' conda (params.enable_conda ? "bioconda::picard=2.26.9" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? @@ -8,10 +8,10 @@ process PICARD_CLEANSAM { 'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }" input: - tuple val(meta), path(sam) + tuple val(meta), path(bam) output: - tuple val(meta), path("*.sam"), emit: sam + tuple val(meta), path("*.bam"), emit: bam path "versions.yml" , emit: versions when: @@ -20,7 +20,6 @@ process PICARD_CLEANSAM { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def STRINGENCY = task.ext.stringency ?: "STRICT" def avail_mem = 3 if (!task.memory) { log.info '[Picard CleanSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -32,9 +31,8 @@ process PICARD_CLEANSAM { -Xmx${avail_mem}g \\ CleanSam \\ ${args} \\ - -I ${sam} \\ - -O ${prefix}.sam \\ - --VALIDATION_STRINGENCY ${STRINGENCY} + -I ${bam} \\ + -O ${prefix}.bam cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/cleansam/meta.yml b/modules/picard/cleansam/meta.yml index d22e1742..11d8b7c4 100644 --- a/modules/picard/cleansam/meta.yml +++ b/modules/picard/cleansam/meta.yml @@ -1,8 +1,7 @@ name: picard_cleansam -description: Cleans the provided SAM/BAM, soft-clipping beyond-end-of-reference alignments and setting MAPQ to 0 for unmapped reads +description: Cleans the provided BAM, soft-clipping beyond-end-of-reference alignments and setting MAPQ to 0 for unmapped reads keywords: - clean - - sam - bam tools: - picard: @@ -22,8 +21,8 @@ input: e.g. 
[ id:'test', single_end:false ] - sam: type: file - description: SAM file - pattern: "*.{sam}" + description: BAM file + pattern: "*.{bam}" output: - meta: @@ -37,8 +36,8 @@ output: pattern: "versions.yml" - sam: type: file - description: Cleaned SAM file - pattern: "*.{sam}" + description: Cleaned BAM file + pattern: "*.{bam}" authors: - "@sateeshperi" diff --git a/tests/modules/picard/cleansam/test.yml b/tests/modules/picard/cleansam/test.yml index 716dfe6a..3b235d07 100644 --- a/tests/modules/picard/cleansam/test.yml +++ b/tests/modules/picard/cleansam/test.yml @@ -4,7 +4,7 @@ - picard/cleansam - picard files: - - path: output/picard/test.sam - md5sum: e314171a6060eb79947c13ad126ddf00 + - path: output/picard/test.bam + md5sum: a48f8e77a1480445efc57570c3a38a68 - path: output/picard/versions.yml md5sum: e6457d7c6de51bf6f4b577eda65e57ac From e0bf0a168976ab73fc511e49cd3e68a18f0680ec Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Sun, 27 Feb 2022 14:35:47 +0100 Subject: [PATCH 003/283] Fix: Leehom meta output channel descriptions (#1356) * fix: remove left-over unnecessary code * Fix metayml docs of R2 output channels --- modules/leehom/meta.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/leehom/meta.yml b/modules/leehom/meta.yml index 658db227..05a52743 100644 --- a/modules/leehom/meta.yml +++ b/modules/leehom/meta.yml @@ -61,11 +61,11 @@ output: pattern: "*.r1.fail.fq.gz" - unmerged_r2_fq_pass: type: file - description: Passed unmerged R1 FASTQs + description: Passed unmerged R2 FASTQs pattern: "*.r2.fq.gz" - unmerged_r2_fq_pass: type: file - description: Failed unmerged R1 FASTQs + description: Failed unmerged R2 FASTQs pattern: "*.r2.fail.fq.gz" - log: type: file From 55bee0b02e78394e6534049f6bf607abda057271 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sun, 27 Feb 2022 17:58:35 -0700 Subject: [PATCH 004/283] add module for hpsuisero (#1331) * add module for hpsuisero * Update meta.yml * Update main.nf Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/hpsuissero/main.nf | 44 ++++++++++++++++++++++++ modules/hpsuissero/meta.yml | 43 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 +++ tests/modules/hpsuissero/main.nf | 15 ++++++++ tests/modules/hpsuissero/nextflow.config | 5 +++ tests/modules/hpsuissero/test.yml | 9 +++++ 6 files changed, 120 insertions(+) create mode 100644 modules/hpsuissero/main.nf create mode 100644 modules/hpsuissero/meta.yml create mode 100644 tests/modules/hpsuissero/main.nf create mode 100644 tests/modules/hpsuissero/nextflow.config create mode 100644 tests/modules/hpsuissero/test.yml diff --git a/modules/hpsuissero/main.nf b/modules/hpsuissero/main.nf new file mode 100644 index 00000000..4b31f91c --- /dev/null +++ b/modules/hpsuissero/main.nf @@ -0,0 +1,44 @@ +def VERSION = '1.0.1' // Version information not provided by tool on CLI + +process HPSUISSERO { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::hpsuissero=1.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hpsuissero%3A1.0.1--hdfd78af_0': + 'quay.io/biocontainers/hpsuissero:1.0.1--hdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def is_compressed = fasta.getName().endsWith(".gz") ? true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + HpsuisSero.sh \\ + -i $fasta_name \\ + -o ./ \\ + -s $prefix \\ + -x fasta \\ + -t $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + hpsuissero: $VERSION + END_VERSIONS + """ +} diff --git a/modules/hpsuissero/meta.yml b/modules/hpsuissero/meta.yml new file mode 100644 index 00000000..2f48c6c3 --- /dev/null +++ b/modules/hpsuissero/meta.yml @@ -0,0 +1,43 @@ +name: hpsuissero +description: Serotype prediction of Haemophilus parasuis assemblies +keywords: + - bacteria + - fasta + - haemophilus +tools: + - hpsuissero: + description: Rapid Haemophilus parasuis serotyping pipeline for Nanpore data + homepage: https://github.com/jimmyliu1326/HpsuisSero + documentation: https://github.com/jimmyliu1326/HpsuisSero + tool_dev_url: https://github.com/jimmyliu1326/HpsuisSero + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Assembly in FASTA format + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz,faa,faa.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited serotype prediction + pattern: "*.{tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 5ae30708..b4e8428f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -807,6 +807,10 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** +hpsuissero: + - modules/hpsuissero/** + - tests/modules/hpsuissero/** + ichorcna/createpon: - modules/ichorcna/createpon/** - tests/modules/ichorcna/createpon/** diff --git a/tests/modules/hpsuissero/main.nf b/tests/modules/hpsuissero/main.nf new file mode 100644 index 00000000..f66fcd93 --- /dev/null +++ b/tests/modules/hpsuissero/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HPSUISSERO } from '../../../modules/hpsuissero/main.nf' + +workflow test_hpsuissero { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true) + ] + + HPSUISSERO ( input ) +} diff --git a/tests/modules/hpsuissero/nextflow.config b/tests/modules/hpsuissero/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/hpsuissero/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/hpsuissero/test.yml b/tests/modules/hpsuissero/test.yml new file mode 100644 index 00000000..33b26eb2 --- /dev/null +++ b/tests/modules/hpsuissero/test.yml @@ -0,0 +1,9 @@ +- name: hpsuissero test_hpsuissero + command: nextflow run tests/modules/hpsuissero -entry test_hpsuissero -c tests/config/nextflow.config + tags: + - hpsuissero + files: + - path: output/hpsuissero/test_serotyping_res.tsv + md5sum: 559dd2ca386eeb58f3975e3204ce9d43 + - path: output/hpsuissero/versions.yml + md5sum: f65438e63a74ac6ee365bfdbbd3f996a From 9e0abcc44319272104309c52aa7cf1d398baf466 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sun, 27 Feb 2022 20:04:03 -0700 Subject: [PATCH 005/283] add module for ssuisero (#1329) * add module for ssuisero * Update main.nf * Update meta.yml Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/ssuissero/main.nf | 44 +++++++++++++++++++++++++ modules/ssuissero/meta.yml | 43 ++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 +++ tests/modules/ssuissero/main.nf | 15 +++++++++ tests/modules/ssuissero/nextflow.config | 5 +++ tests/modules/ssuissero/test.yml | 9 +++++ 6 files changed, 120 insertions(+) create mode 100644 modules/ssuissero/main.nf create mode 100644 modules/ssuissero/meta.yml create mode 100644 tests/modules/ssuissero/main.nf create mode 100644 tests/modules/ssuissero/nextflow.config create mode 100644 tests/modules/ssuissero/test.yml diff --git a/modules/ssuissero/main.nf b/modules/ssuissero/main.nf new file mode 100644 index 00000000..d1e5744a --- /dev/null +++ b/modules/ssuissero/main.nf @@ -0,0 +1,44 @@ +def VERSION = '1.0.1' // Version information not provided by tool on CLI + +process SSUISSERO { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::ssuissero=1.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/ssuissero%3A1.0.1--hdfd78af_0': + 'quay.io/biocontainers/ssuissero:1.0.1--hdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.tsv"), emit: tsv + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def is_compressed = fasta.getName().endsWith(".gz") ? true : false + def fasta_name = fasta.getName().replace(".gz", "") + """ + if [ "$is_compressed" == "true" ]; then + gzip -c -d $fasta > $fasta_name + fi + + SsuisSero.sh \\ + -i $fasta_name \\ + -o ./ \\ + -s $prefix \\ + -x fasta \\ + -t $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ssuissero: $VERSION + END_VERSIONS + """ +} diff --git a/modules/ssuissero/meta.yml b/modules/ssuissero/meta.yml new file mode 100644 index 00000000..2c0031e6 --- /dev/null +++ b/modules/ssuissero/meta.yml @@ -0,0 +1,43 @@ +name: ssuissero +description: Serotype prediction of Streptococcus suis assemblies +keywords: + - bacteria + - fasta + - streptococcus +tools: + - ssuissero: + description: Rapid Streptococcus suis serotyping pipeline for Nanopore Data + homepage: https://github.com/jimmyliu1326/SsuisSero + documentation: https://github.com/jimmyliu1326/SsuisSero + tool_dev_url: https://github.com/jimmyliu1326/SsuisSero + doi: "" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Assembly in FASTA format + pattern: "*.{fasta,fasta.gz,fa,fa.gz,fna,fna.gz,faa,faa.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: Tab-delimited serotype prediction + pattern: "*.{tsv}" + +authors: + - "@rpetit3" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b4e8428f..8987044c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1537,6 +1537,10 @@ sratools/prefetch: - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** +ssuissero: + - modules/ssuissero/** + - tests/modules/ssuissero/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** diff --git a/tests/modules/ssuissero/main.nf b/tests/modules/ssuissero/main.nf new file mode 100644 index 00000000..aa285133 --- /dev/null +++ b/tests/modules/ssuissero/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SSUISSERO } from '../../../modules/ssuissero/main.nf' + +workflow test_ssuissero { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['haemophilus_influenzae']['genome']['genome_fna_gz'], checkIfExists: true) + ] + + SSUISSERO ( input ) +} diff --git a/tests/modules/ssuissero/nextflow.config b/tests/modules/ssuissero/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/ssuissero/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/ssuissero/test.yml b/tests/modules/ssuissero/test.yml new file mode 100644 index 00000000..ff61b267 --- /dev/null +++ b/tests/modules/ssuissero/test.yml @@ -0,0 +1,9 @@ +- name: ssuissero test_ssuissero + command: nextflow run tests/modules/ssuissero -entry test_ssuissero -c tests/config/nextflow.config + tags: + - ssuissero + files: + - path: output/ssuissero/test_serotyping_res.tsv + md5sum: 559dd2ca386eeb58f3975e3204ce9d43 + - path: output/ssuissero/versions.yml + md5sum: be29b478690b2047e0413ffe01c85e1e From 841c661cad7181a36fd20bb306258d17b750c873 Mon Sep 17 00:00:00 2001 From: "Thomas A. Christensen II" <25492070+MillironX@users.noreply.github.com> Date: Mon, 28 Feb 2022 07:40:24 +0000 Subject: [PATCH 006/283] Add MAFFT module (#1351) Signed-off-by: Thomas A. Christensen II <25492070+MillironX@users.noreply.github.com> Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Robert A. Petit III --- modules/mafft/main.nf | 35 ++++++++++++++++++++++++ modules/mafft/meta.yml | 42 +++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 +++ tests/modules/mafft/main.nf | 15 +++++++++++ tests/modules/mafft/nextflow.config | 6 +++++ tests/modules/mafft/test.yml | 9 +++++++ 6 files changed, 111 insertions(+) create mode 100644 modules/mafft/main.nf create mode 100644 modules/mafft/meta.yml create mode 100644 tests/modules/mafft/main.nf create mode 100644 tests/modules/mafft/nextflow.config create mode 100644 tests/modules/mafft/test.yml diff --git a/modules/mafft/main.nf b/modules/mafft/main.nf new file mode 100644 index 00000000..99485b61 --- /dev/null +++ b/modules/mafft/main.nf @@ -0,0 +1,35 @@ +process MAFFT { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::mafft=7.490" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mafft:7.490--h779adbc_0': + 'quay.io/biocontainers/mafft:7.490--h779adbc_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*.fas"), emit: fas + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + mafft \\ + --thread ${task.cpus} \\ + ${args} \\ + ${fasta} \\ + > ${prefix}.fas + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + mafft: \$(mafft --version 2>&1 | sed 's/^v//' | sed 's/ (.*)//') + END_VERSIONS + """ +} diff --git a/modules/mafft/meta.yml b/modules/mafft/meta.yml new file mode 100644 index 00000000..10c7f0c2 --- /dev/null +++ b/modules/mafft/meta.yml @@ -0,0 +1,42 @@ +name: mafft +description: Multiple sequence alignment using MAFFT +keywords: + - msa + - multiple sequence alignment +tools: + - mafft: + description: Multiple alignment program for amino acid or nucleotide sequences based on fast Fourier transform + homepage: https://mafft.cbrc.jp/alignment/software/ + documentation: https://mafft.cbrc.jp/alignment/software/manual/manual.html + tool_dev_url: https://mafft.cbrc.jp/alignment/software/source.html + doi: "10.1093/nar/gkf436" + licence: ['BSD'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA file containing the sequences to align + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fas: + type: file + description: Aligned sequences in FASTA format + pattern: "*.{fas}" + +authors: + - "@MillironX" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8987044c..f4ed22c8 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -964,6 +964,10 @@ macs2/callpeak: - modules/macs2/callpeak/** - tests/modules/macs2/callpeak/** +mafft: + - modules/mafft/** + - tests/modules/mafft/** + malt/build: - modules/malt/build/** - tests/modules/malt/build_test/** diff --git a/tests/modules/mafft/main.nf b/tests/modules/mafft/main.nf new file mode 100644 index 00000000..7f50b35a --- /dev/null +++ b/tests/modules/mafft/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MAFFT } from '../../../modules/mafft/main.nf' + +workflow test_mafft { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['scaffolds_fasta'], checkIfExists: true) + ] + + MAFFT ( input ) +} diff --git a/tests/modules/mafft/nextflow.config b/tests/modules/mafft/nextflow.config new file mode 100644 index 00000000..46cc926e --- /dev/null +++ b/tests/modules/mafft/nextflow.config @@ -0,0 +1,6 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.args = "--auto" + +} diff --git a/tests/modules/mafft/test.yml b/tests/modules/mafft/test.yml new file mode 100644 index 00000000..cd40caa7 --- /dev/null +++ b/tests/modules/mafft/test.yml @@ -0,0 +1,9 @@ +- name: mafft test_mafft + command: nextflow run tests/modules/mafft -entry test_mafft -c tests/config/nextflow.config + tags: + - mafft + files: + - path: output/mafft/test.fas + md5sum: 
23426611f4a0df532b6708f072bd445b + - path: output/mafft/versions.yml + md5sum: b1b5ab3728ae17401808335f1c8f8215 From 1ad73f1b2abdea9398680d6d20014838135c9a35 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Mon, 28 Feb 2022 12:42:29 +0100 Subject: [PATCH 007/283] update samtools version to 1.15 (#1358) * update samtools version to 1.15 * Update checksums --- modules/samtools/ampliconclip/main.nf | 6 +++--- modules/samtools/bam2fq/main.nf | 6 +++--- modules/samtools/depth/main.nf | 6 +++--- modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/fastq/main.nf | 6 +++--- modules/samtools/fixmate/main.nf | 6 +++--- modules/samtools/flagstat/main.nf | 6 +++--- modules/samtools/idxstats/main.nf | 6 +++--- modules/samtools/index/main.nf | 6 +++--- modules/samtools/merge/main.nf | 6 +++--- modules/samtools/mpileup/main.nf | 6 +++--- modules/samtools/sort/main.nf | 6 +++--- modules/samtools/stats/main.nf | 6 +++--- modules/samtools/view/main.nf | 6 +++--- tests/modules/samtools/ampliconclip/test.yml | 10 +++++----- tests/modules/samtools/bam2fq/test.yml | 4 ++-- tests/modules/samtools/faidx/test.yml | 2 +- tests/modules/samtools/fastq/test.yml | 4 ++-- tests/modules/samtools/fixmate/test.yml | 2 +- tests/modules/samtools/index/test.yml | 2 +- tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/test.yml | 4 ++-- 22 files changed, 57 insertions(+), 57 deletions(-) diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 69c2ff7b..4e76b1b4 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_AMPLICONCLIP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 4b78a46e..8dd64dc0 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_BAM2FQ { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(inputbam) diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index d68c5adf..4870b2d8 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_DEPTH { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index b83a4952..7732a4ec 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FAIDX { tag "$fasta" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(fasta) diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 7eb60117..6408d4a4 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FASTQ { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index c1a8164b..14c9db9f 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FIXMATE { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index c267922b..9e3440ac 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FLAGSTAT { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index 8a057413..7d5cee17 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_IDXSTATS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index dfe0234f..e41cdcc8 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_INDEX { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(input) diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index be6fe32e..7b771677 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_MERGE { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(input_files) diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 77afae60..cea40321 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 0f2237cc..0e2de8ba 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_SORT { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index f6fe3bfe..6efc9d9a 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_STATS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(input), path(input_index) diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index aee21a4e..75aad063 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_VIEW { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: tuple val(meta), path(input) diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index e8fd456c..e9947562 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -5,7 +5,7 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 678f9ab04fbe3206f0f96e170fd833e9 + md5sum: 5d0e8bc9e6059ef3a63ee6328a3935c7 - name: samtools ampliconclip no stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config @@ -14,9 +14,9 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: bbf65ea626539d96c8271e17d1fc988b + md5sum: 2c998295d624c59620b7ffdb0cc080e2 - path: output/samtools/test.cliprejects.bam - md5sum: a0bee15aead020d16d0c81bd9667df46 + md5sum: f3ebba8d91ad29cc4d2d00943e6f6bab - name: samtools ampliconclip with stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config @@ -25,8 +25,8 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: f5a3611ecad34ba2dde77096e1c7dd93 + md5sum: 87882973b425ab27aad6ef18faf11f25 - path: output/samtools/test.cliprejects.bam - md5sum: 90ee7ce908b4bdb89ab41e4410de9012 + md5sum: eb5e186e1a69864dc2e99a290f02ff78 - path: output/samtools/test.clipstats.txt md5sum: fc23355e1743d47f2541f2cb1a7a0cda diff --git a/tests/modules/samtools/bam2fq/test.yml b/tests/modules/samtools/bam2fq/test.yml index feb994fd..213c7a2d 100644 --- a/tests/modules/samtools/bam2fq/test.yml +++ b/tests/modules/samtools/bam2fq/test.yml @@ -14,9 +14,9 @@ - samtools files: - path: output/samtools/test_1.fq.gz - md5sum: 4522edbe158ec4804765794569f67493 + md5sum: 1c84aadcdca10e97be2b5b6ce773f5ed - path: output/samtools/test_2.fq.gz - md5sum: 7e00ef40d5cfe272b67461381019dcc1 + md5sum: e679ec035d3208785e704458d6b68c8c - path: output/samtools/test_other.fq.gz md5sum: 709872fc2910431b1e8b7074bfe38c67 - path: output/samtools/test_singleton.fq.gz diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index dc2184ee..1a49a0d5 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -7,4 +7,4 @@ - path: output/samtools/genome.fasta.fai md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 - path: output/samtools/versions.yml - md5sum: d56671a7c8f8058944d3d536c3058f7f + md5sum: 6a16b2148a0ab43e6d0506056e6a0409 diff --git a/tests/modules/samtools/fastq/test.yml b/tests/modules/samtools/fastq/test.yml index 39da9889..ff39d61a 100644 --- a/tests/modules/samtools/fastq/test.yml +++ b/tests/modules/samtools/fastq/test.yml @@ -5,6 +5,6 @@ - samtools/fastq files: - path: output/samtools/test_2.fastq.gz - md5sum: 3b1c92f33a44a78d82f8360ab4fdfd61 + md5sum: 51e7a469b554de694799bec982fd722e - path: output/samtools/test_1.fastq.gz - md5sum: 5a3f9c69a032c4ffd9071ea31a14e6f9 + md5sum: 6c2d5b467eb94e058300271a542e34e6 diff --git a/tests/modules/samtools/fixmate/test.yml 
b/tests/modules/samtools/fixmate/test.yml index 8e87e059..59cd6b41 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -5,4 +5,4 @@ - samtools/fixmate files: - path: output/samtools/test.bam - md5sum: a4092657a4b17170c7702a76cbf192a1 + md5sum: c7f574bb0c469e0ccfecb6b7210e03c5 diff --git a/tests/modules/samtools/index/test.yml b/tests/modules/samtools/index/test.yml index 7184be8f..09684166 100644 --- a/tests/modules/samtools/index/test.yml +++ b/tests/modules/samtools/index/test.yml @@ -23,4 +23,4 @@ - samtools/index files: - path: output/samtools/test.paired_end.sorted.bam.csi - md5sum: 3dd9e3ed959fca075b88bb8dc3cf7dbd + md5sum: 8d63373007553e74d823fc2b9cbcf84d diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index dfd2eb69..4535dd09 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -5,4 +5,4 @@ - samtools/sort files: - path: output/samtools/test.sorted.bam - md5sum: 4adc495469724a375d5e1a9f3485e38d + md5sum: a73238d6b896a3a946025d6b13fe9525 diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index d3444f02..44b7ef8c 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -5,7 +5,7 @@ - samtools files: - path: output/samtools/test.paired_end.sorted.bam.stats - md5sum: 09146eeecfcae2a84fb8615c86cd8d64 + md5sum: 6e3ca28b3e98dade14992dd7ea5fc886 - name: samtools stats test_samtools_stats_cram command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config @@ -14,4 +14,4 @@ - samtools files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats - md5sum: ab49e7380714b7033e374ba1114e5e54 + md5sum: 985455b573444c3743510d603ed41f8c From ef811d952bc21f4e26a60fd96d57979ca4fb6ce7 Mon Sep 17 00:00:00 2001 From: Mei Wu <25568561+projectoriented@users.noreply.github.com> Date: Mon, 28 Feb 2022 14:23:12 +0100 Subject: [PATCH 008/283] bcftools/annotate ready2go (#1291) * bcf annotate ready2go * edited output name * fixed output * updated bcftools ver * changed contain output string * removed contain key entirely * fixed md5sum for test.yml Co-authored-by: Robert A. Petit III Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/bcftools/annotate/main.nf | 42 +++++++++++++++++ modules/bcftools/annotate/meta.yml | 45 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bcftools/annotate/main.nf | 14 ++++++ .../modules/bcftools/annotate/nextflow.config | 5 +++ tests/modules/bcftools/annotate/test.yml | 9 ++++ 6 files changed, 119 insertions(+) create mode 100644 modules/bcftools/annotate/main.nf create mode 100644 modules/bcftools/annotate/meta.yml create mode 100644 tests/modules/bcftools/annotate/main.nf create mode 100644 tests/modules/bcftools/annotate/nextflow.config create mode 100644 tests/modules/bcftools/annotate/test.yml diff --git a/modules/bcftools/annotate/main.nf b/modules/bcftools/annotate/main.nf new file mode 100644 index 00000000..437baaf3 --- /dev/null +++ b/modules/bcftools/annotate/main.nf @@ -0,0 +1,42 @@ +process BCFTOOLS_ANNOTATE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::bcftools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/bcftools:1.15--haf5b3da_0': + 'quay.io/biocontainers/bcftools:1.15--haf5b3da_0' }" + + input: + tuple val(meta), path(input) + + output: + tuple val(meta), path("*_annotated.vcf.gz"), optional:true , emit: vcf + tuple val(meta), path("*_annotated.bcf") , optional:true , emit: bcf + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + def matcher = input =~ /vcf/ + def output_suffix = matcher ? "vcf.gz" : "bcf" + def output_type_compressed = matcher ? "z" : "b" + """ + bcftools \\ + annotate \\ + $args \\ + --output ${prefix}_annotated.${output_suffix} \\ + --output-type $output_type_compressed \\ + --threads $task.cpus \\ + $input + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/bcftools/annotate/meta.yml b/modules/bcftools/annotate/meta.yml new file mode 100644 index 00000000..3ed124d5 --- /dev/null +++ b/modules/bcftools/annotate/meta.yml @@ -0,0 +1,45 @@ +name: bcftools_annotate +description: Add or remove annotations. +keywords: + - bcftools + - annotate + - vcf + - remove + - add +tools: + - annotate: + description: Add or remove annotations. + homepage: http://samtools.github.io/bcftools/bcftools.html + documentation: https://samtools.github.io/bcftools/bcftools.html#annotate + doi: 10.1093/bioinformatics/btp352 + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: files + description: Query VCF or BCF file, can be either uncompressed or compressed +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - vcf: + type: file + description: Compressed annotated VCF file + pattern: "*_annotated.vcf.gz" + - bcf: + type: file + description: Compressed annotated BCF file + pattern: "*_annotated.bcf" +authors: + - "@projectoriented" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f4ed22c8..c553b9ce 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -98,6 +98,10 @@ bbmap/index: - modules/bbmap/index/** - tests/modules/bbmap/index/** +bcftools/annotate: + - modules/bcftools/annotate/** + - tests/modules/bcftools/annotate/** + bcftools/concat: - modules/bcftools/concat/** - tests/modules/bcftools/concat/** diff --git a/tests/modules/bcftools/annotate/main.nf b/tests/modules/bcftools/annotate/main.nf new file mode 100644 index 00000000..2f2b66c9 --- /dev/null +++ b/tests/modules/bcftools/annotate/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCFTOOLS_ANNOTATE } from '../../../../modules/bcftools/annotate/main.nf' + +workflow test_bcftools_annotate { + + input = [ + [ id:'test_compressed', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + + BCFTOOLS_ANNOTATE ( input ) +} diff --git a/tests/modules/bcftools/annotate/nextflow.config b/tests/modules/bcftools/annotate/nextflow.config new file mode 100644 index 00000000..2670da17 --- /dev/null +++ b/tests/modules/bcftools/annotate/nextflow.config @@ -0,0 +1,5 @@ +process { + ext.args = "-x ID,INFO/DP,FORMAT/DP" + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/bcftools/annotate/test.yml b/tests/modules/bcftools/annotate/test.yml new file mode 100644 index 00000000..43537180 --- /dev/null +++ b/tests/modules/bcftools/annotate/test.yml @@ -0,0 +1,9 @@ +- name: bcftools annotate test_bcftools_annotate + command: nextflow run tests/modules/bcftools/annotate -entry test_bcftools_annotate -c tests/config/nextflow.config + tags: + - bcftools/annotate + - bcftools + files: + - path: output/bcftools/test_compressed_annotated.vcf.gz + - path: output/bcftools/versions.yml + md5sum: b647b465acc221f6fe6fbcc319724eed From 38ffbfdb63454d20c56cd16e9b8b489165fea0c9 Mon Sep 17 00:00:00 2001 From: Mei Wu <25568561+projectoriented@users.noreply.github.com> Date: Mon, 28 Feb 2022 16:16:29 +0100 Subject: [PATCH 009/283] bcftools/annotate follow up corrections (#1359) * bcf annotate ready2go * edited output name * fixed output * updated bcftools ver * changed contain output string * removed contain key entirely * fixed md5sum for test.yml * using match instead of find * bcftools/annotate refactored with complete test * rm trailing white space Co-authored-by: Robert A. 
Petit III --- modules/bcftools/annotate/main.nf | 4 ++-- tests/config/test_data.config | 2 ++ tests/modules/bcftools/annotate/main.nf | 15 ++++++++++++--- tests/modules/bcftools/annotate/test.yml | 18 ++++++++++++++---- 4 files changed, 30 insertions(+), 9 deletions(-) diff --git a/modules/bcftools/annotate/main.nf b/modules/bcftools/annotate/main.nf index 437baaf3..40e32047 100644 --- a/modules/bcftools/annotate/main.nf +++ b/modules/bcftools/annotate/main.nf @@ -22,7 +22,7 @@ process BCFTOOLS_ANNOTATE { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def matcher = input =~ /vcf/ + def matcher = input ==~ /\S+\.*vcf\.\S*/ def output_suffix = matcher ? "vcf.gz" : "bcf" def output_type_compressed = matcher ? "z" : "b" """ @@ -36,7 +36,7 @@ process BCFTOOLS_ANNOTATE { cat <<-END_VERSIONS > versions.yml "${task.process}": - bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + bcftools: \$( bcftools --version |& sed '1!d; s/^.*bcftools //' ) END_VERSIONS """ } diff --git a/tests/config/test_data.config b/tests/config/test_data.config index e28b0cb0..7c3e183c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -67,6 +67,8 @@ params { test_computematrix_mat_gz = "${test_data_dir}/genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz" + test_bcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.bcf" + test_vcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf" test_vcf_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz" test_vcf_gz_tbi = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz.tbi" diff --git a/tests/modules/bcftools/annotate/main.nf b/tests/modules/bcftools/annotate/main.nf index 2f2b66c9..0d096ca9 100644 --- a/tests/modules/bcftools/annotate/main.nf +++ b/tests/modules/bcftools/annotate/main.nf @@ -4,11 +4,20 @@ nextflow.enable.dsl = 2 include { BCFTOOLS_ANNOTATE } from '../../../../modules/bcftools/annotate/main.nf' -workflow test_bcftools_annotate { - +workflow test_bcftools_annotate_out_vcf { + input = [ - [ id:'test_compressed', single_end:false ], // meta map + [ id:'test_compressed_vcf', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] BCFTOOLS_ANNOTATE ( input ) } + +workflow test_bcftools_annotate_out_bcf { + + input = [ + [ id:'test_compressed_bcf', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_bcf'], checkIfExists: true) ] + + BCFTOOLS_ANNOTATE ( input ) +} diff --git a/tests/modules/bcftools/annotate/test.yml b/tests/modules/bcftools/annotate/test.yml index 43537180..e8cdcc4b 100644 --- a/tests/modules/bcftools/annotate/test.yml +++ b/tests/modules/bcftools/annotate/test.yml @@ -1,9 +1,19 @@ -- name: bcftools annotate test_bcftools_annotate - command: nextflow run tests/modules/bcftools/annotate -entry test_bcftools_annotate -c tests/config/nextflow.config +- name: bcftools annotate test_bcftools_annotate_out_vcf + command: nextflow run tests/modules/bcftools/annotate -entry test_bcftools_annotate_out_vcf -c tests/config/nextflow.config tags: - bcftools/annotate - bcftools files: - - path: output/bcftools/test_compressed_annotated.vcf.gz + - path: output/bcftools/test_compressed_vcf_annotated.vcf.gz - path: output/bcftools/versions.yml - md5sum: b647b465acc221f6fe6fbcc319724eed + md5sum: de86d4d411baef1aaee0e72f519dbe1f + +- name: bcftools annotate test_bcftools_annotate_out_bcf + command: nextflow run 
tests/modules/bcftools/annotate -entry test_bcftools_annotate_out_bcf -c tests/config/nextflow.config + tags: + - bcftools/annotate + - bcftools + files: + - path: output/bcftools/test_compressed_bcf_annotated.bcf + - path: output/bcftools/versions.yml + md5sum: a57e62a5a189fe85aabd52c010d88ca6 From c189835b1bb444e5ee87416fdbea66e2c2ba365e Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Mon, 28 Feb 2022 19:08:58 +0100 Subject: [PATCH 010/283] add controlfreec (#1333) * add drafty controlfreec * get sofatware version * use maps in map * update paths to new and soon-to-be merged test files, add more input docu * Stab at documenting args map * Update syntax * Bit more description * Make the linter happy * tests pass locally * Add outputs & docu * tests are failing locally now :/ but cpn file can also be added * All tests passing, need to update test data again to add folder * Clean up files * Clean up files * Clean up files * Don't know how to get the test to run with the direcotry for now. they pass locally though * Make linter happy * Name process back * Update to use tar folder * fix the checksum --- modules/controlfreec/main.nf | 158 ++++++++++++++++++ modules/controlfreec/meta.yml | 183 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 7 +- tests/modules/controlfreec/main.nf | 37 +++++ tests/modules/controlfreec/nextflow.config | 26 +++ tests/modules/controlfreec/test.yml | 22 +++ 7 files changed, 436 insertions(+), 1 deletion(-) create mode 100644 modules/controlfreec/main.nf create mode 100644 modules/controlfreec/meta.yml create mode 100644 tests/modules/controlfreec/main.nf create mode 100644 tests/modules/controlfreec/nextflow.config create mode 100644 tests/modules/controlfreec/test.yml diff --git a/modules/controlfreec/main.nf b/modules/controlfreec/main.nf new file mode 100644 index 00000000..21084f64 --- /dev/null +++ b/modules/controlfreec/main.nf @@ -0,0 +1,158 @@ +process CONTROLFREEC { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::control-freec=11.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1': + 'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }" + + input: + tuple val(meta), path(mpileup_normal), path(mpileup_tumor), path(cpn_normal), path(cpn_tumor), path(minipileup_normal), path(minipileup_tumor) + path fasta + path fai + path snp_position + path known_snps + path known_snps_tbi + path chr_directory + path mappability + path target_bed + path gccontent_profile + + output: + tuple val(meta), path("*_ratio.BedGraph") , emit: bedgraph, optional: true + tuple val(meta), path("*_control.cpn") , emit: control_cpn + tuple val(meta), path("*_sample.cpn") , emit: sample_cpn + tuple val(meta), path("GC_profile.*.cpn") , emit: gcprofile_cpn, optional:true + tuple val(meta), path("*_BAF.txt") , emit: BAF + tuple val(meta), path("*_CNVs") , emit: CNV + tuple val(meta), path("*_info.txt") , emit: info + tuple val(meta), path("*_ratio.txt") , emit: ratio + tuple val(meta), path("config.txt") , emit: config + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + //"General" configurations + def bedgraphoutput = task.ext.args?["general"]?["bedgraphoutput"] ? "BedGraphOutput = ${task.ext.args["general"]["bedgraphoutput"]}" : "" + def chr_files = chr_directory ? 
"chrFiles =\${PWD}/${chr_directory}" : "" + def chr_length = fai ? "chrLenFile = \${PWD}/${fai}" : "" + def breakpointthreshold = task.ext.args?["general"]?["breakpointthreshold"] ? "breakPointThreshold = ${task.ext.args["general"]["breakpointthreshold"]}" : "" + def breakpointtype = task.ext.args?["general"]?["breakpointtype"] ? "breakPointType = ${task.ext.args["general"]["breakpointtype"]}" : "" + def coefficientofvariation = task.ext.args?["general"]?["coefficient"] ? "coefficientOfVariation = ${task.ext.args["general"]["coefficientofvariation"]}" : "" + def contamination = task.ext.args?["general"]?["contamination"] ? "contamination = ${task.ext.args["general"]["contamination"]}" : "" + def contaminationadjustment = task.ext.args?["general"]?["contaminationadjustment"] ? "contaminationAdjustment = ${task.ext.args["general"]["contaminationadjustment"]}" : "" + def degree = task.ext.args?["general"]?["degree"] ? "degree = ${task.ext.args["general"]["degree"]}" : "" + def forcegccontentnormalization = task.ext.args?["general"]?["forcegccontentnormalization"] ? "forceGCcontentNormalization = ${task.ext.args["general"]["forcegccontentnormalization"]}" : "" + def gccontentprofile = gccontent_profile ? "GCcontentProfile = ${gccontent_profile}" : "" + def mappability = mappability ? "gemMappabilityFile = \${PWD}/${mappability}" : "" + def intercept = task.ext.args?["general"]?["intercept"] ? "intercept = ${task.ext.args["general"]["intercept"]}" : "" + def mincnalength = task.ext.args?["general"]?["mincnalength"] ? "minCNAlength = ${task.ext.args["general"]["mincnalength"]}" : "" + def minmappabilityperwindow = task.ext.args?["general"]?["minmappabilityperwindow"] ? "minMappabilityPerWindow = ${task.ext.args["general"]["minmappabilityperwindow"]}" : "" + def minexpectedgc = task.ext.args?["general"]?["minexpectedgc"] ? "minExpectedGC = ${task.ext.args["general"]["minexpectedgc"]}" : "" + def maxexpectedgc = task.ext.args?["general"]?["maxexpectedgc"] ? "maxExpectedGC = ${task.ext.args["general"]["maxexpectedgc"]}" : "" + def minimalsubclonepresence = task.ext.args?["general"]?["minimalsubclonepresence"] ? "minimalSubclonePresence = ${task.ext.args["general"]["minimalsubclonepresence"]}" : "" + def noisydata = task.ext.args?["general"]?["noisydata"] ? "noisyData = ${task.ext.args["general"]["noisydata"]}" : "" + def output = task.ext.prefix ? "outputDir = \${PWD}/${task.ext.prefix}" : "" + def ploidy = task.ext.args?["general"]?["ploidy"] ? "ploidy = ${task.ext.args["general"]["ploidy"]}" : "" + def printNA = task.ext.args?["general"]?["printNA"] ? "printNA = ${task.ext.args["general"]["printNA"]}" : "" + def readcountthreshold = task.ext.args?["general"]?["readcountthreshold"] ? "readCountThreshold = ${task.ext.args["general"]["readcountthreshold"]}" : "" + def sex = task.ext.args?["general"]?["sex"] ? "sex = ${task.ext.args["general"]["sex"]}" : "" + def step = task.ext.args?["general"]?["step"] ? "step = ${task.ext.args["general"]["step"]}" : "" + def telocentromeric = task.ext.args?["general"]?["telocentromeric"] ? "telocentromeric = ${task.ext.args["general"]["telocentromeric"]} " : "" + def uniquematch = task.ext.args?["general"]?["uniquematch"] ? "uniqueMatch = ${task.ext.args["general"]["uniquematch"]}" : "" + def window = task.ext.args?["general"]?["window"] ? "window = ${task.ext.args["general"]["window"]}" : "" + + //"Control" configurations + def matefile_normal = mpileup_normal ? "mateFile = \${PWD}/${mpileup_normal}" : "" + def matecopynumberfile_normal = cpn_normal ? 
"mateCopyNumberFile = \${PWD}/${cpn_normal}" : "" + def minipileup_normal = minipileup_normal ? "miniPileup = \${PWD}/${minipileup_normal}" : "" + def inputformat_normal = task.ext.args?["control"]?["inputformat"] ? "inputFormat = ${task.ext.args["control"]["inputformat"]}" : "" + def mateorientation_normal = task.ext.args?["control"]?["mateorientation"] ? "mateOrientation = ${task.ext.args["control"]["mateorientation"]}" : "" + + //"Sample" configuration + def matefile_tumor = mpileup_tumor ? "mateFile = \${PWD}/${mpileup_tumor}" : "" + def matecopynumberfile_tumor = cpn_tumor ? "mateCopyNumberFile = \${PWD}/${cpn_tumor}" : "" + def minipileup_tumor = minipileup_tumor ? "miniPileup = \${PWD}/${minipileup_tumor}" : "" + def inputformat_tumor = task.ext.args?["sample"]?["inputformat"] ? "inputFormat = ${task.ext.args["sample"]["inputformat"]}" : "" + def mateorientation_tumor = task.ext.args?["sample"]?["mateorientation"] ? "mateOrientation = ${task.ext.args["sample"]["mateorientation"]}" : "" + + //"BAF" configuration + def makepileup = snp_position ? "makePileup = \${PWD}/${snp_position}" : "" + def fastafile = fasta ? "fastaFile = \${PWD}/${fasta}" : "" + def minimalcoverageperposition = task.ext.args?["BAF"]?["minimalcoverageperposition"] ? "minimalCoveragePerPosition = ${task.ext.args["BAF"]["minimalcoverageperposition"]}" : "" + def minimalqualityperposition = task.ext.args?["BAF"]?["minimalqualityperposition"] ? "minimalQualityPerPosition = ${task.ext.args["BAF"]["minimalqualityperposition"]}" : "" + def shiftinquality = task.ext.args?["BAF"]?["shiftinquality"] ? "shiftInQuality = ${task.ext.args["BAF"]["shiftinquality"]}" : "" + def snpfile = known_snps ? "SNPfile = \$PWD/${known_snps}" : "" + + //"Target" configuration + def target_bed = target_bed ? 
"captureRegions = ${target_bed}" : "" + """ + touch config.txt + + echo "[general]" >> config.txt + echo ${bedgraphoutput} >> config.txt + echo ${breakpointthreshold} >> config.txt + echo ${breakpointtype} >> config.txt + echo ${chr_files} >> config.txt + echo ${chr_length} >> config.txt + echo ${coefficientofvariation} >> config.txt + echo ${contamination} >> config.txt + echo ${contaminationadjustment} >> config.txt + echo ${degree} >> config.txt + echo ${forcegccontentnormalization} >> config.txt + echo ${gccontentprofile} >> config.txt + echo ${mappability} >> config.txt + echo ${intercept} >> config.txt + echo ${mincnalength} >> config.txt + echo ${minmappabilityperwindow} >> config.txt + echo ${minexpectedgc} >> config.txt + echo ${maxexpectedgc} >> config.txt + echo ${minimalsubclonepresence} >> config.txt + echo "maxThreads = ${task.cpus}" >> config.txt + echo ${noisydata} >> config.txt + echo ${output} >> config.txt + echo ${ploidy} >> config.txt + echo ${printNA} >> config.txt + echo ${readcountthreshold} >> config.txt + echo ${sex} >> config.txt + echo ${step} >> config.txt + echo ${telocentromeric} >> config.txt + echo ${uniquematch} >> config.txt + echo ${window} >> config.txt + + echo "[control]" >> config.txt + echo ${matefile_normal} >> config.txt + echo ${matecopynumberfile_normal} >> config.txt + echo ${minipileup_normal} >> config.txt + echo ${inputformat_normal} >> config.txt + echo ${mateorientation_normal} >> config.txt + + echo "[sample]" >> config.txt + echo ${matefile_tumor} >> config.txt + echo ${matecopynumberfile_tumor} >> config.txt + echo ${minipileup_tumor} >> config.txt + echo ${inputformat_tumor} >> config.txt + echo ${mateorientation_tumor} >> config.txt + + echo "[BAF]" >> config.txt + echo ${makepileup} >> config.txt + echo ${fastafile} >> config.txt + echo ${minimalcoverageperposition} >> config.txt + echo ${minimalqualityperposition} >> config.txt + echo ${shiftinquality} >> config.txt + echo ${snpfile} >> config.txt + + echo "[target]" >> config.txt + echo ${target_bed} >> config.txt + + freec -conf config.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ +} diff --git a/modules/controlfreec/meta.yml b/modules/controlfreec/meta.yml new file mode 100644 index 00000000..4d1e8674 --- /dev/null +++ b/modules/controlfreec/meta.yml @@ -0,0 +1,183 @@ +name: controlfreec +description: Copy number and genotype annotation from whole genome and whole exome sequencing data +keywords: + - cna + - cnv + - somatic + - single + - tumor-only +tools: + - controlfreec: + description: Copy number and genotype annotation from whole genome and whole exome sequencing data. + homepage: http://boevalab.inf.ethz.ch/FREEC + documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html + tool_dev_url: https://github.com/BoevaLab/FREEC/ + doi: "10.1093/bioinformatics/btq635" + licence: ['GPL >=2'] + +input: + - args: + type: map + description: | + Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. + parameters can be removed from the map, if they are not set. All value must be surrounded by quotes, meta map parameters can be set with, i.e. sex = meta.sex: + For default values, please check the documentation above. 
+ + ``` + { + [ + "general" :[ + "bedgraphoutput": , + "breakpointthreshold": , + "breakpointtype": , + "coefficientofvariation": , + "contamination": , + "contaminationadjustment": , + "degree": , + "forcegccontentnormalization": , + "gccontentprofile": , + "intercept": , + "mincnalength": , + "minmappabilityperwindow": , + "minexpectedgc": , + "maxexpectedgc": , + "minimalsubclonepresence": , + "noisydata": , + "ploidy": , + "printNA": , + "readcountthreshold": , + "sex": , + "step": , + "telocentromeric": , + "uniquematch": , + "window": + ], + "control":[ + "inputformat": , + "mateorientation": , + ], + "sample":[ + "inputformat": , + "mateorientation": , + ], + "BAF":[ + "minimalcoverageperposition": , + "minimalqualityperposition": , + "shiftinquality": + ] + ] + } + ``` + + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - mateFile_normal: + type: file + description: File with mapped reads + pattern: "*.{sam,bam,pileup(.gz),bowtie(.gz),eland(.gz),arachne(.gz),psl(.gz),bed(.gz)}" + - mateFile_tumor: + type: file + description: File with mapped reads + pattern: "*.{sam,bam,pileup(.gz),bowtie(.gz),eland(.gz),arachne(.gz),psl(.gz),bed(.gz)}" + - cpn_normal: + type: file + description: Raw copy number profiles (optional) + pattern: "*.cpn" + - cpn_tumor: + type: file + description: Raw copy number profiles (optional) + pattern: "*.cpn" + - minipileup_normal: + type: file + description: miniPileup file from previous run (optional) + pattern: "*.pileup" + - minipileup_tumor: + type: file + description: miniPileup file from previous run (optional) + pattern: "*.pileup" + - fasta: + type: file + description: Reference file (optional; required if args 'makePileup' is set) + pattern: "*.{fasta,fna,fa}" + - fai: + type: file + description: Fasta index + pattern: "*.fai" + - snp_position: + type: file + description: + pattern: "*.{}" + - known_snps: + type: file + description: File with known SNPs + pattern: "*.{vcf,vcf.gz}" + - known_snps_tbi: + type: file + description: Index of known_snps + pattern: "*.tbi" + - chr_directory: + type: file + description: Path to directory with chromosome fasta files (optional, required if gccontentprofile is not provided) + pattern: "*/" + - mappability: + type: file + description: Contains information of mappable positions (optional) + pattern: "*.gem" + - target_bed: + type: file + description: Sorted bed file containing capture regions (optional) + pattern: "*.bed" + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bedgraph: + type: file + description: Bedgraph format for the UCSC genome browser + pattern: ".bedgraph" + - control_cpn: + type: file + description: files with raw copy number profiles + pattern: "*_control.cpn" + - sample_cpn: + type: file + description: files with raw copy number profiles + pattern: "*_sample.cpn" + - gcprofile_cpn: + type: file + description: file with GC-content profile. + pattern: "GC_profile.*.cpn" + - BAF: + type: file + description: file B-allele frequencies for each possibly heterozygous SNP position + pattern: "*_BAF.txt" + - CNV: + type: file + description: file with coordinates of predicted copy number alterations. 
+ pattern: "*_CNVs" + - info: + type: file + description: parsable file with information about FREEC run + pattern: "*_info.txt" + - ratio: + type: file + description: file with ratios and predicted copy number alterations for each window + pattern: "*_ratio.txt" + - config: + type: file + description: Config file used to run Control-FREEC + pattern: "config.txt" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c553b9ce..f3cbaa33 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -356,6 +356,10 @@ cnvkit/batch: - modules/cnvkit/batch/** - tests/modules/cnvkit/batch/** +controlfreec: + - modules/controlfreec/** + - tests/modules/controlfreec/** + cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 7c3e183c..a3c26bcc 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -123,10 +123,12 @@ params { genome_21_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta" genome_21_fasta_fai = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta.fai" genome_21_dict = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.dict" + genome_21_sizes = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.sizes" genome_21_interval_list = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.interval_list" genome_21_multi_interval_bed = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed" genome_21_multi_interval_bed_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed.gz" genome_21_multi_interval_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed.gz.tbi" + genome_21_chromosomes_dir = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/chromosomes.tar.gz" dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz" dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi" @@ -156,7 +158,7 @@ params { justhusky_ped = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky.ped" justhusky_minimal_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz" justhusky_minimal_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/ped/justhusky_minimal.vcf.gz.tbi" - + vcfanno_tar_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/vcfanno/vcfanno_grch38_module_test.tar.gz" vcfanno_toml = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/vcfanno/vcfanno.toml" } @@ -272,6 +274,9 @@ params { test_genome21_indels_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.genome_21.somatic_sv.vcf.gz" test_genome21_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.genome_21.somatic_sv.vcf.gz.tbi" + test_mpileup = "${test_data_dir}/genomics/homo_sapiens/illumina/mpileup/test.mpileup.gz" + test2_mpileup = "${test_data_dir}/genomics/homo_sapiens/illumina/mpileup/test2.mpileup.gz" + test_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test.broadPeak" test2_broadpeak = "${test_data_dir}/genomics/homo_sapiens/illumina/broadpeak/test2.broadPeak" diff --git a/tests/modules/controlfreec/main.nf b/tests/modules/controlfreec/main.nf new file mode 100644 index 00000000..576a845c --- /dev/null +++ 
b/tests/modules/controlfreec/main.nf @@ -0,0 +1,37 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CONTROLFREEC } from '../../../modules/controlfreec/main.nf' +include { UNTAR } from '../../../modules/untar/main.nf' +workflow test_controlfreec { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC ( input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar, + [], + target_bed, + [] + ) +} diff --git a/tests/modules/controlfreec/nextflow.config b/tests/modules/controlfreec/nextflow.config new file mode 100644 index 00000000..5c4250be --- /dev/null +++ b/tests/modules/controlfreec/nextflow.config @@ -0,0 +1,26 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName:CONTROLFREEC{ + ext.args = { [ + "sample":[ + inputformat: 'pileup', + mateorientation: 'FR' + ], + "general" :[ + bedgraphoutput: "TRUE", + noisydata: "TRUE", + minexpectedgc: "0", + readcountthreshold: "1", + sex: meta.sex, + window: "10", + ], + "control":[ + inputformat: "pileup", + mateorientation: "FR" + ] + ] + } + } +} diff --git a/tests/modules/controlfreec/test.yml b/tests/modules/controlfreec/test.yml new file mode 100644 index 00000000..14c30205 --- /dev/null +++ b/tests/modules/controlfreec/test.yml @@ -0,0 +1,22 @@ +- name: controlfreec test_controlfreec + command: nextflow run tests/modules/controlfreec -entry test_controlfreec -c tests/config/nextflow.config + tags: + - controlfreec + files: + - path: output/controlfreec/config.txt + - path: output/controlfreec/test.mpileup.gz_control.cpn + md5sum: 1768b571677c418560e5a8fe203bdc79 + - path: output/controlfreec/test2.mpileup.gz_BAF.txt + md5sum: 3bb7437001cf061a77eaf87b8558c48d + - path: output/controlfreec/test2.mpileup.gz_CNVs + md5sum: 1f4f5834dbd1490afdb22f6d3091c4c9 + - path: output/controlfreec/test2.mpileup.gz_info.txt + md5sum: 1a3055d35028525ccc9e693cc9f335e0 + - path: output/controlfreec/test2.mpileup.gz_ratio.BedGraph + md5sum: 8ba455b232be20cdcc5bf1e4035e8032 + - path: output/controlfreec/test2.mpileup.gz_ratio.txt + md5sum: b76b2434de710325069e37fb1e132760 + - path: output/controlfreec/test2.mpileup.gz_sample.cpn + md5sum: c80dad58a77b1d7ba6d273999f4b4b4b + - path: output/controlfreec/versions.yml + md5sum: ff93f6466d4686aab708425782c6c848 From 950700bcdc0e9a2b6883d40d2c51c6fc435cd714 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Mon, 28 Feb 2022 23:13:11 +0100 Subject: [PATCH 011/283] Update samtools version (#1361) * Update samtools version * update checksums --- modules/bwa/mem/main.nf | 6 +++--- modules/bwa/sampe/main.nf | 6 +++--- 
modules/bwa/samse/main.nf | 6 +++--- modules/bwamem2/mem/main.nf | 6 +++--- modules/custom/getchromsizes/main.nf | 6 +++--- modules/qualimap/bamqccram/main.nf | 6 +++--- tests/modules/bwa/sampe/test.yml | 2 +- tests/modules/bwa/samse/test.yml | 2 +- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 9a91c77f..27ea6f42 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -2,10 +2,10 @@ process BWA_MEM { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" input: tuple val(meta), path(reads) diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index e781679e..73345d81 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -2,10 +2,10 @@ process BWA_SAMPE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" input: tuple val(meta), path(reads), path(sai) diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index ac04c739..2c327d99 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -2,10 +2,10 @@ process BWA_SAMSE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:66ed1b38d280722529bb8a0167b0cf02f8a0b488-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" input: tuple val(meta), path(reads), path(sai) diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 56f595ec..21dfb1d6 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -2,10 +2,10 @@ process BWAMEM2_MEM { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' : - 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:cf603b12db30ec91daa04ba45a8ee0f35bbcd1e2-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' : + 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' }" input: tuple val(meta), path(reads) diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf index 39da7d34..bbcfa9be 100644 --- a/modules/custom/getchromsizes/main.nf +++ b/modules/custom/getchromsizes/main.nf @@ -2,10 +2,10 @@ process CUSTOM_GETCHROMSIZES { tag "$fasta" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.14--hb421002_0' : - 'quay.io/biocontainers/samtools:1.14--hb421002_0' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : + 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: path fasta diff --git a/modules/qualimap/bamqccram/main.nf b/modules/qualimap/bamqccram/main.nf index b9a5538d..ab3fd51a 100644 --- a/modules/qualimap/bamqccram/main.nf +++ b/modules/qualimap/bamqccram/main.nf @@ -2,10 +2,10 @@ process QUALIMAP_BAMQCCRAM { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:4bf11d12f2c3eccf1eb585097c0b6fd31c18c418-0' : - 'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:4bf11d12f2c3eccf1eb585097c0b6fd31c18c418-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' : + 'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' }" input: tuple val(meta), path(cram), path(crai) diff --git a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml index fb6d7708..bf221ebc 100644 --- a/tests/modules/bwa/sampe/test.yml +++ b/tests/modules/bwa/sampe/test.yml @@ -5,4 +5,4 @@ - bwa/sampe files: - path: output/bwa/test.bam - md5sum: f6ad85d66d44c5d26e692109d2e34100 + md5sum: 01d1d71c88b6de07ed51d1d06e9e970b diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml index 5a2fe1e3..c45f69dc 100644 --- a/tests/modules/bwa/samse/test.yml +++ b/tests/modules/bwa/samse/test.yml @@ -5,4 +5,4 @@ - bwa/samse files: - path: output/bwa/test.bam - md5sum: 27eb91146e45dee65664c18596be4262 + md5sum: ddfa4a8f6b65d44704a2d9528abc7e79 From 7ec09d0ef4df89617baacc9b2dafcddb7cd4b05a Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 1 Mar 2022 12:41:20 +0100 Subject: [PATCH 012/283] Fix: use actual tar conda environment for untar module (#1362) * fix: remove left-over unnecessary code * Update main.nf * Update modules/untar/main.nf --- modules/untar/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/untar/main.nf b/modules/untar/main.nf index 01205e60..bbae948a 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -2,7 +2,7 @@ process UNTAR { tag "$archive" label 'process_low' - conda (params.enable_conda ? "conda-forge::sed=4.7" : null) + conda (params.enable_conda ? "conda-forge::tar=1.32" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : 'biocontainers/biocontainers:v1.2.0_cv1' }" From fe4eb459fbc76fbcbf63e204b6f5b79f88dca452 Mon Sep 17 00:00:00 2001 From: Lauri Mesilaakso Date: Tue, 1 Mar 2022 14:29:46 +0100 Subject: [PATCH 013/283] Add stranger (#1360) * Add stranger * Add when clause * Update paths * Update modules/stranger/main.nf Co-authored-by: Mahesh Binzer-Panchal Co-authored-by: Mahesh Binzer-Panchal --- modules/stranger/main.nf | 33 +++++++++++++++++++ modules/stranger/meta.yml | 44 ++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 +++ tests/modules/stranger/main.nf | 19 +++++++++++ tests/modules/stranger/nextflow.config | 5 +++ tests/modules/stranger/test.yml | 13 ++++++++ 6 files changed, 118 insertions(+) create mode 100644 modules/stranger/main.nf create mode 100644 modules/stranger/meta.yml create mode 100644 tests/modules/stranger/main.nf create mode 100644 tests/modules/stranger/nextflow.config create mode 100644 tests/modules/stranger/test.yml diff --git a/modules/stranger/main.nf b/modules/stranger/main.nf new file mode 100644 index 00000000..2e647627 --- /dev/null +++ b/modules/stranger/main.nf @@ -0,0 +1,33 @@ +process STRANGER { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::stranger=0.8.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/stranger:0.8.1--pyh5e36f6f_0': + 'quay.io/biocontainers/stranger:0.8.1--pyh5e36f6f_0' }" + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.gz"), emit: vcf + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + stranger \\ + $args \\ + $vcf | gzip --no-name > ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + stranger: \$( stranger --version ) + END_VERSIONS + """ +} diff --git a/modules/stranger/meta.yml b/modules/stranger/meta.yml new file mode 100644 index 00000000..a9a280ad --- /dev/null +++ b/modules/stranger/meta.yml @@ -0,0 +1,44 @@ +name: stranger +description: Annotates output files from ExpansionHunter with the pathologic implications of the repeat sizes. +keywords: + - STR + - repeat_expansions + - annotate + - vcf +tools: + - stranger: + description: Annotate VCF files with str variants + homepage: https://github.com/moonso/stranger + documentation: https://github.com/moonso/stranger + tool_dev_url: https://github.com/moonso/stranger + doi: "10.5281/zenodo.4548873" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF with repeat expansions + pattern: "*.{vcf.gz,vcf}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - vcf: + type: file + description: annotated VCF with keys STR_STATUS, NormalMax and PathologicMin + pattern: "*.{vcf.gz}" + +authors: + - "@ljmesi" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f3cbaa33..8ec52a63 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1565,6 +1565,10 @@ star/genomegenerate: - modules/star/genomegenerate/** - tests/modules/star/genomegenerate/** +stranger: + - modules/stranger/** + - tests/modules/stranger/** + strelka/germline: - modules/strelka/germline/** - tests/modules/strelka/germline/** diff --git a/tests/modules/stranger/main.nf b/tests/modules/stranger/main.nf new file mode 100644 index 00000000..bc4bd3ce --- /dev/null +++ b/tests/modules/stranger/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' +include { STRANGER } from '../../../modules/stranger/main.nf' + +workflow test_stranger { + + input = [ [ id:'test', gender:'male' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + variant_catalog = file(params.test_data['homo_sapiens']['genome']['repeat_expansions'], checkIfExists: true) + + EXPANSIONHUNTER ( input, fasta, variant_catalog ) + STRANGER ( EXPANSIONHUNTER.out.vcf ) +} diff --git a/tests/modules/stranger/nextflow.config b/tests/modules/stranger/nextflow.config new file mode 
100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/stranger/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/stranger/test.yml b/tests/modules/stranger/test.yml new file mode 100644 index 00000000..821928e8 --- /dev/null +++ b/tests/modules/stranger/test.yml @@ -0,0 +1,13 @@ +- name: stranger test_stranger + command: nextflow run tests/modules/stranger -entry test_stranger -c tests/config/nextflow.config + tags: + - stranger + files: + - path: output/expansionhunter/test.vcf + md5sum: cfd4a1d35c0e469b99eb6aaa6d22de76 + - path: output/expansionhunter/versions.yml + md5sum: f3962a6eecfddf9682414c0f605a885a + - path: output/stranger/test.vcf.gz + md5sum: bbe15159195681d5c18596d3ad85c78f + - path: output/stranger/versions.yml + md5sum: 5ec35fd835fb1be50bc3e7c004310fc0 From 4ab098733a8f7df71e6b83af2893f3e6aa3a7bfc Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Tue, 1 Mar 2022 18:13:05 +0000 Subject: [PATCH 014/283] set memory in MB (#1260) Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/plink2/extract/main.nf | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/plink2/extract/main.nf b/modules/plink2/extract/main.nf index 89cffdc3..15f9c038 100644 --- a/modules/plink2/extract/main.nf +++ b/modules/plink2/extract/main.nf @@ -23,11 +23,13 @@ process PLINK2_EXTRACT { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" if( "$pgen" == "${prefix}.pgen" ) error "Input and output names are the same, use \"task.ext.prefix\" in modules.config to disambiguate!" + def mem_mb = task.memory.toMega() """ plink2 \\ + --threads $task.cpus \\ + --memory $mem_mb \\ --pfile ${pgen.baseName} \\ $args \\ - --threads $task.cpus \\ --extract $variants \\ --make-pgen vzs \\ --out ${prefix} From 8e5eaf1fa442225f8d24e95bba2a19a7e2fc9ce9 Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Wed, 2 Mar 2022 00:43:28 +0000 Subject: [PATCH 015/283] Update plink2/vcf to output zstandard compressed data automatically (#1258) * update plink2/vcf to output zstandard compressed data automatically * update meta * set plink CPU and memory usage Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/plink2/vcf/main.nf | 12 ++++++++---- modules/plink2/vcf/meta.yml | 2 +- tests/modules/plink2/vcf/test.yml | 10 ++++++---- 3 files changed, 15 insertions(+), 9 deletions(-) diff --git a/modules/plink2/vcf/main.nf b/modules/plink2/vcf/main.nf index 041cb4b9..090851a8 100644 --- a/modules/plink2/vcf/main.nf +++ b/modules/plink2/vcf/main.nf @@ -11,10 +11,10 @@ process PLINK2_VCF { tuple val(meta), path(vcf) output: - tuple val(meta), path("*.pgen"), emit: pgen - tuple val(meta), path("*.psam"), emit: psam - tuple val(meta), path("*.pvar"), emit: pvar - path "versions.yml" , emit: versions + tuple val(meta), path("*.pgen") , emit: pgen + tuple val(meta), path("*.psam") , emit: psam + tuple val(meta), path("*.pvar.zst"), emit: pvar + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -22,10 +22,14 @@ process PLINK2_VCF { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def mem_mb = task.memory.toMega() """ plink2 \\ + --threads $task.cpus \\ + --memory $mem_mb \\ $args \\ --vcf $vcf \\ + --make-pgen vzs \\ --out ${prefix} cat <<-END_VERSIONS > versions.yml diff --git a/modules/plink2/vcf/meta.yml 
b/modules/plink2/vcf/meta.yml index 5697ebef..3c776daf 100644 --- a/modules/plink2/vcf/meta.yml +++ b/modules/plink2/vcf/meta.yml @@ -46,7 +46,7 @@ output: - pvar: type: file description: PLINK 2 variant information file - pattern: "*.{psam}" + pattern: "*.{pvar.zst}" authors: - "@nebfield" diff --git a/tests/modules/plink2/vcf/test.yml b/tests/modules/plink2/vcf/test.yml index 52f58a42..d354af48 100644 --- a/tests/modules/plink2/vcf/test.yml +++ b/tests/modules/plink2/vcf/test.yml @@ -1,12 +1,14 @@ - name: plink2 vcf test_plink2_vcf - command: nextflow run ./tests/modules/plink2/vcf -entry test_plink2_vcf -c ./tests/config/nextflow.config -c ./tests/modules/plink2/vcf/nextflow.config + command: nextflow run tests/modules/plink2/vcf -entry test_plink2_vcf -c tests/config/nextflow.config tags: - - plink2/vcf - plink2 + - plink2/vcf files: - path: output/plink2/test.pgen md5sum: d66d3cd4a6c9cca1a4073d7f4b277041 - path: output/plink2/test.psam md5sum: dc3b77d7753a7bed41734323e3549b10 - - path: output/plink2/test.pvar - md5sum: d61e53f847a6335138b584216b4e45d0 + - path: output/plink2/test.pvar.zst + md5sum: b53cccb83e024a39789af5eab8de1c28 + - path: output/plink2/versions.yml + md5sum: 82ada74bc81473b7cba377f696acf54c From 4983f77796ce7e367df6e1dd68ffd24cc924074f Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Wed, 2 Mar 2022 13:04:22 +0100 Subject: [PATCH 016/283] Add `hamronization/deeparg` (#1364) * fix: remove left-over unnecessary code * Add hamronizer/deeparg * Add when condition * Apply suggestions from code review * Update modules/hamronization/deeparg/meta.yml --- modules/hamronization/deeparg/main.nf | 44 ++++++++++++++ modules/hamronization/deeparg/meta.yml | 60 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 +- tests/modules/hamronization/deeparg/main.nf | 15 +++++ .../hamronization/deeparg/nextflow.config | 5 ++ tests/modules/hamronization/deeparg/test.yml | 8 +++ 7 files changed, 139 insertions(+), 1 deletion(-) create mode 100644 modules/hamronization/deeparg/main.nf create mode 100644 modules/hamronization/deeparg/meta.yml create mode 100644 tests/modules/hamronization/deeparg/main.nf create mode 100644 tests/modules/hamronization/deeparg/nextflow.config create mode 100644 tests/modules/hamronization/deeparg/test.yml diff --git a/modules/hamronization/deeparg/main.nf b/modules/hamronization/deeparg/main.nf new file mode 100644 index 00000000..ebfdcf17 --- /dev/null +++ b/modules/hamronization/deeparg/main.nf @@ -0,0 +1,44 @@ +process HAMRONIZATION_DEEPARG { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::hamronization=1.0.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/hamronization:1.0.3--py_0': + 'quay.io/biocontainers/hamronization:1.0.3--py_0' }" + + input: + tuple val(meta), path(report) + val(format) + val(software_version) + val(reference_db_version) + + output: + tuple val(meta), path("*.json"), optional: true, emit: json + tuple val(meta), path("*.tsv") , optional: true, emit: tsv + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + hamronize \\ + deeparg \\ + ${report} \\ + $args \\ + --format ${format} \\ + --analysis_software_version ${software_version} \\ + --reference_database_version ${reference_db_version} \\ + --input_file_name ${prefix} \\ + > ${prefix}.${format} + + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) + END_VERSIONS + """ +} diff --git a/modules/hamronization/deeparg/meta.yml b/modules/hamronization/deeparg/meta.yml new file mode 100644 index 00000000..0747700e --- /dev/null +++ b/modules/hamronization/deeparg/meta.yml @@ -0,0 +1,60 @@ +name: hamronization_deeparg +description: Tool to convert and summarize DeepARG outputs using the hAMRonization specification +keywords: + - amr + - antimicrobial resistance + - reporting + - deeparg +tools: + - hamronization: + description: Tool to convert and summarize AMR gene detection outputs using the hAMRonization specification + homepage: https://github.com/pha4ge/hAMRonization/blob/master/README.md + documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md + tool_dev_url: https://github.com/pha4ge/hAMRonization + doi: "" + licence: ['GNU Lesser General Public v3 (LGPL v3)'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - report: + type: file + description: Output .mapping.ARG file from DeepARG + pattern: "*.mapping.ARG" + - format: + type: value + description: Type of report file to be produced + pattern: "tsv|json" + - software_version: + type: value + description: Version of DeepARG used + pattern: "[0-9].[0-9].[0-9]" + - reference_db_version: + type: value + description: Database version of DeepARG used + pattern: "[0-9]" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: hAMRonised report in JSON format + pattern: "*.json" + - tsv: + type: file + description: hAMRonised report in TSV format + pattern: "*.json" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8ec52a63..7146639c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -752,6 +752,10 @@ gunzip: - modules/gunzip/** - tests/modules/gunzip/** +hamronization/deeparg: + - modules/hamronization/deeparg/** + - tests/modules/hamronization/deeparg/** + hicap: - modules/hicap/** - tests/modules/hicap/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index a3c26bcc..dda10192 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -68,7 +68,7 @@ params { test_computematrix_mat_gz = "${test_data_dir}/genomics/sarscov2/illumina/deeptools/test.computeMatrix.mat.gz" test_bcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.bcf" - + test_vcf = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf" test_vcf_gz = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz" test_vcf_gz_tbi = "${test_data_dir}/genomics/sarscov2/illumina/vcf/test.vcf.gz.tbi" @@ -321,6 +321,8 @@ params { 'genome' { genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz" genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf" + genome_mapping_potential_arg = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.mapping.potential.ARG" + } 'illumina' { test1_contigs_fa_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/illumina/fasta/test1.contigs.fa.gz" diff --git a/tests/modules/hamronization/deeparg/main.nf b/tests/modules/hamronization/deeparg/main.nf new file mode 100644 index 00000000..9888bc42 --- /dev/null +++ b/tests/modules/hamronization/deeparg/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HAMRONIZATION_DEEPARG } from '../../../../modules/hamronization/deeparg/main.nf' + +workflow test_hamronization_deeparg { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true), + ] + + HAMRONIZATION_DEEPARG ( input, 'tsv', '1.0.2', '2' ) +} diff --git a/tests/modules/hamronization/deeparg/nextflow.config b/tests/modules/hamronization/deeparg/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/hamronization/deeparg/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/hamronization/deeparg/test.yml b/tests/modules/hamronization/deeparg/test.yml new file mode 100644 index 00000000..4884ac6c --- /dev/null +++ b/tests/modules/hamronization/deeparg/test.yml @@ -0,0 +1,8 @@ +- name: hamronization deeparg test_hamronization_deeparg + command: nextflow run tests/modules/hamronization/deeparg -entry test_hamronization_deeparg -c tests/config/nextflow.config + tags: + - hamronization + - hamronization/deeparg + files: + - path: output/hamronization/test.tsv + md5sum: 3c315605aca0c5964796bb5fd4cdd522 From 2cd502a236aec1a1eecbe1f9189af3414efbf1d8 Mon Sep 17 
00:00:00 2001 From: Michael J Cipriano <42848032+mjcipriano@users.noreply.github.com> Date: Wed, 2 Mar 2022 08:27:38 -0500 Subject: [PATCH 017/283] Faqcs patch (#1367) * faqcs update to capture debug output * change paths Co-authored-by: Cipriano --- modules/faqcs/main.nf | 1 + modules/faqcs/meta.yml | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/modules/faqcs/main.nf b/modules/faqcs/main.nf index a03a0150..24f81a95 100644 --- a/modules/faqcs/main.nf +++ b/modules/faqcs/main.nf @@ -13,6 +13,7 @@ process FAQCS { output: tuple val(meta), path('*.trimmed.fastq.gz') , emit: reads tuple val(meta), path('*.stats.txt') , emit: stats + tuple val(meta), path('*.txt') , optional:true, emit: txt tuple val(meta), path('*_qc_report.pdf') , optional:true, emit: statspdf tuple val(meta), path('*.log') , emit: log tuple val(meta), path('*.discard.fastq.gz') , optional:true, emit: reads_fail diff --git a/modules/faqcs/meta.yml b/modules/faqcs/meta.yml index eca35e65..1161a13d 100644 --- a/modules/faqcs/meta.yml +++ b/modules/faqcs/meta.yml @@ -54,6 +54,10 @@ output: type: file description: trimming/qc text stats file pattern: "*.stats.txt" + - txt: + type: file + description: trimming/qc text txt files from --debug option + pattern: "*.txt" - statspdf: type: file description: trimming/qc pdf report file From fba1b381f5629d2dfce3ec2350dc03821fd275ad Mon Sep 17 00:00:00 2001 From: Michael J Cipriano <42848032+mjcipriano@users.noreply.github.com> Date: Wed, 2 Mar 2022 08:53:05 -0500 Subject: [PATCH 018/283] Faqcs patch to add tests for --debug output (#1368) * faqcs update to capture debug output * change paths * change nf.config on faqcs * Updated test,yml on faqcs Co-authored-by: Cipriano --- tests/modules/faqcs/nextflow.config | 1 + tests/modules/faqcs/test.yml | 24 ++++++++++++++++++++++++ 2 files changed, 25 insertions(+) diff --git a/tests/modules/faqcs/nextflow.config b/tests/modules/faqcs/nextflow.config index 8730f1c4..a75988d0 100644 --- a/tests/modules/faqcs/nextflow.config +++ b/tests/modules/faqcs/nextflow.config @@ -1,5 +1,6 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.args = {"--debug" } } diff --git a/tests/modules/faqcs/test.yml b/tests/modules/faqcs/test.yml index 47f973f3..ef50d30d 100644 --- a/tests/modules/faqcs/test.yml +++ b/tests/modules/faqcs/test.yml @@ -3,8 +3,20 @@ tags: - faqcs files: + - path: output/faqcs/qa.test.base_content.txt + md5sum: f992603f01ca430c03c8aae02eba2f5d + - path: output/faqcs/qa.test.for_qual_histogram.txt + md5sum: a3d462ab84151e982f99f85f52c21de3 + - path: output/faqcs/qa.test.length_count.txt + md5sum: 80915f09fbaf5884c32e95acab2d031c + - path: output/faqcs/test.base_content.txt + md5sum: f992603f01ca430c03c8aae02eba2f5d - path: output/faqcs/test.fastp.log md5sum: be79dc893f87de1f82faf749cdfb848c + - path: output/faqcs/test.for_qual_histogram.txt + md5sum: a3d462ab84151e982f99f85f52c21de3 + - path: output/faqcs/test.length_count.txt + md5sum: 80915f09fbaf5884c32e95acab2d031c - path: output/faqcs/test.stats.txt md5sum: ea20e93706b2e4c676004253baa3cec6 - path: output/faqcs/test.trimmed.fastq.gz @@ -18,8 +30,20 @@ tags: - faqcs files: + - path: output/faqcs/qa.test.base_content.txt + md5sum: 99aa9a775ccd8d6503f0cf80f775203c + - path: output/faqcs/qa.test.for_qual_histogram.txt + md5sum: 4f4b131be5425bdfa4b3237e44fa7d48 + - path: output/faqcs/qa.test.length_count.txt + md5sum: 420298983c762754d5b0ef32c9d5dad4 + - path: output/faqcs/test.base_content.txt + md5sum: 
99aa9a775ccd8d6503f0cf80f775203c - path: output/faqcs/test.fastp.log md5sum: be79dc893f87de1f82faf749cdfb848c + - path: output/faqcs/test.for_qual_histogram.txt + md5sum: 4f4b131be5425bdfa4b3237e44fa7d48 + - path: output/faqcs/test.length_count.txt + md5sum: 420298983c762754d5b0ef32c9d5dad4 - path: output/faqcs/test.stats.txt md5sum: 9a693f8af94ab8c485519d9a523aa622 - path: output/faqcs/test_1.trimmed.fastq.gz From 61c88c623389ba4646e5f01547db3050f0abd7fc Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 3 Mar 2022 08:02:04 +0100 Subject: [PATCH 019/283] Add `hamronization/summarize` (#1366) * fix: remove left-over unnecessary code * Add hamronizer/deeparg * Add hamronisation/summarise * Update test.yml * Update modules/hamronization/summarize/meta.yml Co-authored-by: Jasmin F <73216762+jasmezz@users.noreply.github.com> * line up outputs Co-authored-by: Robert A. Petit III Co-authored-by: Jasmin F <73216762+jasmezz@users.noreply.github.com> --- modules/hamronization/summarize/main.nf | 38 ++++++++++++++++ modules/hamronization/summarize/meta.yml | 45 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/hamronization/summarize/main.nf | 36 +++++++++++++++ .../hamronization/summarize/nextflow.config | 5 +++ .../modules/hamronization/summarize/test.yml | 14 ++++++ 6 files changed, 142 insertions(+) create mode 100644 modules/hamronization/summarize/main.nf create mode 100644 modules/hamronization/summarize/meta.yml create mode 100644 tests/modules/hamronization/summarize/main.nf create mode 100644 tests/modules/hamronization/summarize/nextflow.config create mode 100644 tests/modules/hamronization/summarize/test.yml diff --git a/modules/hamronization/summarize/main.nf b/modules/hamronization/summarize/main.nf new file mode 100644 index 00000000..9b10de80 --- /dev/null +++ b/modules/hamronization/summarize/main.nf @@ -0,0 +1,38 @@ +process HAMRONIZATION_SUMMARIZE { + label 'process_low' + + conda (params.enable_conda ? "bioconda::hamronization=1.0.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/hamronization:1.0.3--py_0': + 'quay.io/biocontainers/hamronization:1.0.3--py_0' }" + + input: + path(reports) + val(format) + + output: + path("hamronization_combined_report.json"), optional: true, emit: json + path("hamronization_combined_report.tsv") , optional: true, emit: tsv + path("hamronization_combined_report.html"), optional: true, emit: html + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def outformat = format == 'interactive' ? 
'html' : format + """ + hamronize \\ + summarize \\ + ${reports.join(' ')} \\ + -t ${format} \\ + $args \\ + -o hamronization_combined_report.${outformat} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + hamronization: \$(echo \$(hamronize --version 2>&1) | cut -f 2 -d ' ' ) + END_VERSIONS + """ +} diff --git a/modules/hamronization/summarize/meta.yml b/modules/hamronization/summarize/meta.yml new file mode 100644 index 00000000..7665c2c5 --- /dev/null +++ b/modules/hamronization/summarize/meta.yml @@ -0,0 +1,45 @@ +name: hamronization_summarize +description: Tool to summarize and combine all hAMRonization reports into a single file +keywords: + - amr + - antimicrobial resistance + - reporting +tools: + - hamronization: + description: Tool to convert and summarize AMR gene detection outputs using the hAMRonization specification + homepage: https://github.com/pha4ge/hAMRonization/blob/master/README.md + documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md + tool_dev_url: https://github.com/pha4ge/hAMRonization + doi: "" + licence: ['GNU Lesser General Public v3 (LGPL v3)'] + +input: + - reports: + type: file + description: List of multiple hAMRonization reports in either JSON or TSV format + pattern: "*.{json,tsv}" + - format: + type: value + description: Type of final combined report file to be produced + pattern: "tsv|json|interactive" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - json: + type: file + description: hAMRonised summary in JSON format + pattern: "*.json" + - tsv: + type: file + description: hAMRonised summary in TSV format + pattern: "*.json" + - html: + type: file + description: hAMRonised summary in HTML format + pattern: "*.html" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 7146639c..de03a379 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -756,6 +756,10 @@ hamronization/deeparg: - modules/hamronization/deeparg/** - tests/modules/hamronization/deeparg/** +hamronization/summarize: + - modules/hamronization/summarize/** + - tests/modules/hamronization/summarize/** + hicap: - modules/hicap/** - tests/modules/hicap/** diff --git a/tests/modules/hamronization/summarize/main.nf b/tests/modules/hamronization/summarize/main.nf new file mode 100644 index 00000000..e0eae4a4 --- /dev/null +++ b/tests/modules/hamronization/summarize/main.nf @@ -0,0 +1,36 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HAMRONIZATION_DEEPARG } from '../../../../modules/hamronization/deeparg/main.nf' +include { HAMRONIZATION_DEEPARG as HAMRONIZATION_DEEPARG_SECOND } from '../../../../modules/hamronization/deeparg/main.nf' +include { HAMRONIZATION_SUMMARIZE } from '../../../../modules/hamronization/summarize/main.nf' + +workflow test_hamronization_summarize { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true), + ] + + input2 = [ + [ id:'test2', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['genome']['genome_mapping_potential_arg'], checkIfExists: true), + ] + + HAMRONIZATION_DEEPARG ( input, 'tsv', '1.0.2', '2' ) + HAMRONIZATION_DEEPARG_SECOND ( input2, 'tsv', '1.0.2', '2' ) + + ch_deeparg_run_one = HAMRONIZATION_DEEPARG.out.tsv + ch_deeparg_run_two = HAMRONIZATION_DEEPARG_SECOND.out.tsv + + ch_deeparg_run_one + .mix( 
ch_deeparg_run_two ) + .map{ + [ it[1] ] + } + .collect() + .set { ch_input_for_summarize } + + HAMRONIZATION_SUMMARIZE ( ch_input_for_summarize , 'json' ) +} diff --git a/tests/modules/hamronization/summarize/nextflow.config b/tests/modules/hamronization/summarize/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/hamronization/summarize/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/hamronization/summarize/test.yml b/tests/modules/hamronization/summarize/test.yml new file mode 100644 index 00000000..ad883971 --- /dev/null +++ b/tests/modules/hamronization/summarize/test.yml @@ -0,0 +1,14 @@ +- name: hamronization summarize test_hamronization_summarize + command: nextflow run tests/modules/hamronization/summarize -entry test_hamronization_summarize -c tests/config/nextflow.config + tags: + - hamronization + - hamronization/summarize + files: + - path: output/hamronization/hamronization_combined_report.json + md5sum: 1623b6cc3b213208a425e023edd94691 + - path: output/hamronization/test.tsv + md5sum: 3c315605aca0c5964796bb5fd4cdd522 + - path: output/hamronization/test2.tsv + md5sum: 453f38502e35261a50a0849dca34f05b + - path: output/hamronization/versions.yml + md5sum: 99b5046fac643e16ca3362d1baf3284b From 0816df1e8b9cb523e7eebe4da55961189117fe34 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 3 Mar 2022 10:01:39 +0100 Subject: [PATCH 020/283] Fix: removes hardcoded flags and 'custom' file output for AdapterRemoval (#1357) * fix: remove left-over unnecessary code * Removes hardcoded flags and more explicit output * Fix test md5 --- modules/adapterremoval/main.nf | 28 ++++++++-------- modules/adapterremoval/meta.yml | 46 +++++++++++++++++++++++---- tests/modules/adapterremoval/test.yml | 31 ++++++++++++------ 3 files changed, 77 insertions(+), 28 deletions(-) diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 55c5f17c..77838287 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -11,9 +11,15 @@ process ADAPTERREMOVAL { tuple val(meta), path(reads) output: - tuple val(meta), path('*.fastq.gz'), emit: reads - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path('*.truncated.gz') , optional: true, emit: singles_truncated + tuple val(meta), path('*.discarded.gz') , optional: true, emit: discarded + tuple val(meta), path('*.pair1.truncated.gz') , optional: true, emit: pair1_truncated + tuple val(meta), path('*.pair2.truncated.gz') , optional: true, emit: pair2_truncated + tuple val(meta), path('*.collapsed.gz') , optional: true, emit: collapsed + tuple val(meta), path('*.collapsed.truncated') , optional: true, emit: collapsed_truncated + tuple val(meta), path('*paired.gz') , optional: true, emit: paired_interleaved + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -28,30 +34,27 @@ process ADAPTERREMOVAL { --file1 $reads \\ $args \\ --basename $prefix \\ - --threads $task.cpus \\ + --threads ${task.cpus} \\ --settings ${prefix}.log \\ - --output1 ${prefix}.trimmed.fastq.gz \\ --seed 42 \\ - --gzip \\ + --gzip cat <<-END_VERSIONS > versions.yml "${task.process}": adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") END_VERSIONS """ - } else if (!meta.single_end && !meta.collapse) { + } else if (!meta.single_end ) { """ AdapterRemoval \\ --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ $args \\ --basename $prefix \\ - --threads $task.cpus \\ + --threads ${task.cpus} \\ --settings ${prefix}.log \\ - --output1 ${prefix}.pair1.trimmed.fastq.gz \\ - --output2 ${prefix}.pair2.trimmed.fastq.gz \\ --seed 42 \\ - --gzip \\ + --gzip cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -63,13 +66,12 @@ process ADAPTERREMOVAL { AdapterRemoval \\ --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ - --collapse \\ $args \\ --basename $prefix \\ --threads $task.cpus \\ --settings ${prefix}.log \\ --seed 42 \\ - --gzip \\ + --gzip cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index 4923fa42..a9a071f5 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -17,13 +17,13 @@ input: type: map description: | Groovy Map containing sample information - e.g. [ id:'test', single_end:false, collapse:false ] + e.g. [ id:'test', single_end:false ] - reads: type: file description: | List of input FastQ files of size 1 and 2 for single-end and paired-end data, respectively. - pattern: "*.{fq,fastq,fg.gz,fastq.gz}" + pattern: "*.{fq,fastq,fq.gz,fastq.gz}" output: - meta: @@ -31,12 +31,45 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - reads: + - singles_truncated: type: file description: | - List of input adapter trimmed FastQ files of size 1 or 2 for - single-end or collapsed data and paired-end data, respectively. - pattern: "*.{fastq.gz}" + Adapter trimmed FastQ files of either single-end reads, or singleton + 'orphaned' reads from merging of paired-end data (i.e., one of the pair + was lost due to filtering thresholds). + pattern: "*.truncated.gz" + - discarded: + type: file + description: | + Adapter trimmed FastQ files of reads that did not pass filtering + thresholds. + pattern: "*.discarded.gz" + - pair1_truncated: + type: file + description: | + Adapter trimmed R1 FastQ files of paired-end reads that did not merge + with their respective R2 pair due to long templates. The respective pair + is stored in 'pair2_truncated'. + pattern: "*.pair1.truncated.gz" + - pair2_truncated: + type: file + description: | + Adapter trimmed R2 FastQ files of paired-end reads that did not merge + with their respective R1 pair due to long templates. The respective pair + is stored in 'pair1_truncated'. + pattern: "*.pair2.truncated.gz" + - collapsed: + type: file + description: | + Collapsed FastQ of paired-end reads that successfully merged with their + respective R1 pair but were not trimmed. + pattern: "*.collapsed.gz" + - collapsed_truncated: + type: file + description: | + Collapsed FastQ of paired-end reads that successfully merged with their + respective R1 pair and were trimmed of adapter due to sufficient overlap. 
+ pattern: "*.collapsed.truncated.gz" - log: type: file description: AdapterRemoval log file @@ -48,3 +81,4 @@ output: authors: - "@maxibor" + - "@jfy133" diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index a6c4a6cf..805af9fc 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -1,31 +1,44 @@ - name: adapterremoval test_adapterremoval_single_end - command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_single_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config + command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_single_end -c tests/config/nextflow.config tags: - adapterremoval files: + - path: output/adapterremoval/test.discarded.gz - path: output/adapterremoval/test.log md5sum: 2fd3d5d703b63ba33a83021fccf25f77 - - path: output/adapterremoval/test.trimmed.fastq.gz + - path: output/adapterremoval/test.truncated.gz md5sum: 62139afee94defad5b83bdd0b8475a1f + - path: output/adapterremoval/versions.yml + md5sum: ac5b46719719b7ee62739530b80869fc - name: adapterremoval test_adapterremoval_paired_end - command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config + command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end -c tests/config/nextflow.config tags: - adapterremoval files: + - path: output/adapterremoval/test.discarded.gz - path: output/adapterremoval/test.log md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 - - path: output/adapterremoval/test.pair1.trimmed.fastq.gz + - path: output/adapterremoval/test.pair1.truncated.gz md5sum: 294a6277f0139bd597e57c6fa31f39c7 - - path: output/adapterremoval/test.pair2.trimmed.fastq.gz + - path: output/adapterremoval/test.pair2.truncated.gz md5sum: de7b38e2c881bced8671acb1ab452d78 + - path: output/adapterremoval/test.singleton.truncated.gz + - path: output/adapterremoval/versions.yml + md5sum: fa621c887897da5a379c719399c17db7 - name: adapterremoval test_adapterremoval_paired_end_collapse - command: nextflow run ./tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c ./tests/config/nextflow.config -c ./tests/modules/adapterremoval/nextflow.config + command: nextflow run tests/modules/adapterremoval -entry test_adapterremoval_paired_end_collapse -c tests/config/nextflow.config tags: - adapterremoval files: + - path: output/adapterremoval/test.discarded.gz - path: output/adapterremoval/test.log - md5sum: 7f0b2328152226e46101a535cce718b3 - - path: output/adapterremoval/test.merged.fastq.gz - md5sum: 07a8f725bfd3ecbeabdc41b32d898dee + md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 + - path: output/adapterremoval/test.pair1.truncated.gz + md5sum: 294a6277f0139bd597e57c6fa31f39c7 + - path: output/adapterremoval/test.pair2.truncated.gz + md5sum: de7b38e2c881bced8671acb1ab452d78 + - path: output/adapterremoval/test.singleton.truncated.gz + - path: output/adapterremoval/versions.yml + md5sum: fd428f92a8446e0b34c5ae1c447215b8 From c450b08a75cda8878876ccbbe42493d6774397bd Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Thu, 3 Mar 2022 11:54:07 +0100 Subject: [PATCH 021/283] separate gvcf and vcf in output channels (#1371) * separate gvcf and vcf in output channels * regex not working, just using prefix now --- modules/deepvariant/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/modules/deepvariant/main.nf b/modules/deepvariant/main.nf index c5e81997..8e5f10df 100644 --- a/modules/deepvariant/main.nf +++ b/modules/deepvariant/main.nf @@ -17,8 +17,8 @@ process DEEPVARIANT { path(fai) output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - tuple val(meta), path("*g.vcf.gz"), emit: gvcf + tuple val(meta), path("${prefix}.vcf.gz") , emit: vcf + tuple val(meta), path("${prefix}.g.vcf.gz"), emit: gvcf path "versions.yml" , emit: versions when: @@ -26,7 +26,7 @@ process DEEPVARIANT { script: def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" + prefix = task.ext.prefix ?: "${meta.id}" def regions = intervals ? "--regions ${intervals}" : "" """ From 76cdd46f3f8a77fb5023fb5a39c4ab99925b8b56 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 3 Mar 2022 16:35:19 +0100 Subject: [PATCH 022/283] Add meta to MALT/RUN (#1372) * Add meta to MALT/RUN * Update modules/malt/run/main.nf --- modules/malt/run/main.nf | 11 ++++++----- modules/malt/run/meta.yml | 5 +++++ tests/modules/malt/run/main.nf | 6 +++++- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index 99657305..61c02ec1 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -1,4 +1,5 @@ process MALT_RUN { + tag "$meta.id" label 'process_high' conda (params.enable_conda ? "bioconda::malt=0.53" : null) @@ -7,15 +8,15 @@ process MALT_RUN { 'quay.io/biocontainers/malt:0.53--hdfd78af_0' }" input: - path fastqs + tuple val(meta), path(fastqs) val mode path index output: - path "*.rma6" , emit: rma6 - path "*.{tab,text,sam}", optional:true, emit: alignments - path "*.log" , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path("*.rma6") , emit: rma6 + tuple val(meta), path("*.{tab,text,sam}"), optional:true, emit: alignments + tuple val(meta), path("*.log") , emit: log + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/modules/malt/run/meta.yml b/modules/malt/run/meta.yml index 7bd79290..ae4277a8 100644 --- a/modules/malt/run/meta.yml +++ b/modules/malt/run/meta.yml @@ -19,6 +19,11 @@ tools: licence: ["GPL v3"] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
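Note on the DeepVariant change above: assigning prefix without the def keyword keeps it visible outside the script block, which is what lets the output globs reference it (the later AdapterRemoval patch states the same rationale). A minimal sketch of the general pattern, using a purely hypothetical process and tool name:

    process EXAMPLE_TOOL {
        input:
        tuple val(meta), path(input_file)

        output:
        tuple val(meta), path("${prefix}.out"), emit: result  // the output block can resolve 'prefix'

        script:
        prefix = task.ext.prefix ?: "${meta.id}"              // no 'def', so the declaration above can see it
        """
        some_tool --in $input_file --out ${prefix}.out
        """
    }
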
[ id:'test', single_end:false ] - fastqs: type: file description: Input FASTQ files diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf index 292a3fcf..d92dee71 100644 --- a/tests/modules/malt/run/main.nf +++ b/tests/modules/malt/run/main.nf @@ -12,10 +12,14 @@ workflow test_malt_run { gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) seq_type = "DNA" map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) - input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] mode = "BlastN" UNZIP ( map_db ) MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) MALT_RUN ( input, mode, MALT_BUILD.out.index ) } + From d8028dc1c3ef64c2ee3494ce65d4f4a76c42bde9 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Thu, 3 Mar 2022 17:40:16 +0100 Subject: [PATCH 023/283] Add picard/sortvcf (#1370) * sortvcf * add files * update meta * update java mem * update documentation link * remove todo * review suggestions * fix test.yml * fix conda error * fix version code --- modules/picard/sortvcf/main.nf | 49 ++++++++++++++++++++ modules/picard/sortvcf/meta.yml | 40 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/picard/sortvcf/main.nf | 18 +++++++ tests/modules/picard/sortvcf/nextflow.config | 5 ++ tests/modules/picard/sortvcf/test.yml | 7 +++ 6 files changed, 123 insertions(+) create mode 100644 modules/picard/sortvcf/main.nf create mode 100644 modules/picard/sortvcf/meta.yml create mode 100644 tests/modules/picard/sortvcf/main.nf create mode 100644 tests/modules/picard/sortvcf/nextflow.config create mode 100644 tests/modules/picard/sortvcf/test.yml diff --git a/modules/picard/sortvcf/main.nf b/modules/picard/sortvcf/main.nf new file mode 100644 index 00000000..0f10c1ab --- /dev/null +++ b/modules/picard/sortvcf/main.nf @@ -0,0 +1,49 @@ +process PICARD_SORTVCF { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + + input: + tuple val(meta), path(vcf) + path reference + path sequence_dict + + output: + tuple val(meta), path("*_sorted.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def seq_dict = sequence_dict ? "-SEQUENCE_DICTIONARY $sequence_dict" : "" + def reference = reference ? "-REFERENCE_SEQUENCE $reference" : "" + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard SortVcf] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' 
+ } else { + avail_mem = task.memory.giga + } + + """ + picard \\ + SortVcf \\ + -Xmx${avail_mem}g \\ + --INPUT $vcf \\ + $args \\ + $seq_dict \\ + $reference \\ + --OUTPUT ${prefix}_sorted.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(picard SortVcf --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ +} diff --git a/modules/picard/sortvcf/meta.yml b/modules/picard/sortvcf/meta.yml new file mode 100644 index 00000000..a2b46d5a --- /dev/null +++ b/modules/picard/sortvcf/meta.yml @@ -0,0 +1,40 @@ +name: picard_sortvcf +description: Sorts vcf files +keywords: + - sort + - vcf +tools: + - picard: + description: Java tools for working with NGS data in the BAM/CRAM/SAM and VCF format + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/command-line-overview.html#SortVcf + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: VCF file + pattern: "*.{vcf,vcf.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - vcf: + type: file + description: Sorted VCF file + pattern: "*.{vcf}" + +authors: + - "@ramprasadn" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index de03a379..553128de 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1249,6 +1249,10 @@ picard/sortsam: - modules/picard/sortsam/** - tests/modules/picard/sortsam/** +picard/sortvcf: + - modules/picard/sortvcf/** + - tests/modules/picard/sortvcf/** + pirate: - modules/pirate/** - tests/modules/pirate/** diff --git a/tests/modules/picard/sortvcf/main.nf b/tests/modules/picard/sortvcf/main.nf new file mode 100644 index 00000000..a88c69fc --- /dev/null +++ b/tests/modules/picard/sortvcf/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_SORTVCF } from '../../../../modules/picard/sortvcf/main.nf' + +workflow test_picard_sortvcf { + + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) + ] + + fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + dict = [ file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) ] + + PICARD_SORTVCF ( input, fasta, dict ) +} diff --git a/tests/modules/picard/sortvcf/nextflow.config b/tests/modules/picard/sortvcf/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/picard/sortvcf/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/sortvcf/test.yml b/tests/modules/picard/sortvcf/test.yml new file mode 100644 index 00000000..26271077 --- /dev/null +++ b/tests/modules/picard/sortvcf/test.yml @@ -0,0 +1,7 @@ +- name: picard sortvcf + command: nextflow run ./tests/modules/picard/sortvcf -entry test_picard_sortvcf -c ./tests/config/nextflow.config -c ./tests/modules/picard/sortvcf/nextflow.config + tags: + - picard + - picard/sortvcf + files: + - path: output/picard/test_sorted.vcf.gz From 72b96f4e504eef673f2b5c13560a9d90b669129b Mon Sep 17 00:00:00 2001 From: "James A. 
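For orientation, a minimal sketch of how the new PICARD_SORTVCF module might be wired into a pipeline, mirroring the test workflow above; the include path, the params used and the sample file name are illustrative assumptions, not part of the patch:

    include { PICARD_SORTVCF } from './modules/picard/sortvcf/main'

    workflow {
        ch_vcf = Channel.of(
            [ [ id:'sample1' ], file('sample1.vcf.gz') ]              // [ meta, vcf ] as the input block expects
        )
        PICARD_SORTVCF ( ch_vcf, file(params.fasta), file(params.dict) )
        PICARD_SORTVCF.out.vcf.view()                                 // [ meta, <prefix>_sorted.vcf.gz ]
    }
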
Fellows Yates" Date: Thu, 3 Mar 2022 17:58:04 +0100 Subject: [PATCH 024/283] Add missing $prefix definition for MALT_RUN (#1373) * fix: remove left-over unnecessary code * Add forgotten prefix for the log * Update meta.yml * Update tests Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/malt/run/main.nf | 3 ++- modules/malt/run/meta.yml | 2 +- tests/modules/malt/run/test.yml | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/malt/run/main.nf b/modules/malt/run/main.nf index 61c02ec1..4e2e50c9 100644 --- a/modules/malt/run/main.nf +++ b/modules/malt/run/main.nf @@ -23,6 +23,7 @@ process MALT_RUN { script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" def avail_mem = 6 if (!task.memory) { log.info '[MALT_RUN] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' @@ -39,7 +40,7 @@ process MALT_RUN { $args \\ --inFile ${fastqs.join(' ')} \\ -m $mode \\ - --index $index/ |&tee malt-run.log + --index $index/ |&tee ${prefix}-malt-run.log cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/malt/run/meta.yml b/modules/malt/run/meta.yml index ae4277a8..66f2d7a9 100644 --- a/modules/malt/run/meta.yml +++ b/modules/malt/run/meta.yml @@ -52,7 +52,7 @@ output: - log: type: file description: Log of verbose MALT stdout - pattern: "malt-run.log" + pattern: "*-malt-run.log" authors: - "@jfy133" diff --git a/tests/modules/malt/run/test.yml b/tests/modules/malt/run/test.yml index 335bc977..8ad44094 100644 --- a/tests/modules/malt/run/test.yml +++ b/tests/modules/malt/run/test.yml @@ -5,4 +5,4 @@ - malt/run files: - path: output/malt/test_1.rma6 - - path: output/malt/malt-run.log + - path: output/malt/test-malt-run.log From 251015c8bac16ecb55d738362503f17a84c45d18 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Fri, 4 Mar 2022 09:08:02 -0600 Subject: [PATCH 025/283] Add samtools index to yara_mapper module (#1353) * Add samtools index to yara_mapper module. * samtools sort required for index Co-authored-by: James A. 
Fellows Yates --- modules/yara/mapper/main.nf | 12 +++++++++--- modules/yara/mapper/meta.yml | 4 ++++ tests/modules/yara/mapper/test.yml | 3 +++ 3 files changed, 16 insertions(+), 3 deletions(-) diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 9c993ac7..15b39236 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -13,6 +13,7 @@ process YARA_MAPPER { output: tuple val(meta), path("*.mapped.bam"), emit: bam + tuple val(meta), path("*.mapped.bam.bai"), emit: bai path "versions.yml" , emit: versions when: @@ -28,7 +29,9 @@ process YARA_MAPPER { -t $task.cpus \\ -f bam \\ ${index}/yara \\ - $reads | samtools view -@ $task.cpus -hb -F4 > ${prefix}.mapped.bam + $reads | samtools view -@ $task.cpus -hb -F4 | samtools sort -@ $task.cpus > ${prefix}.mapped.bam + + samtools index -@ $task.cpus ${prefix}.mapped.bam cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -46,8 +49,11 @@ process YARA_MAPPER { ${reads[0]} \\ ${reads[1]} > output.bam - samtools view -@ $task.cpus -hF 4 -f 0x40 -b output.bam > ${prefix}_1.mapped.bam - samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam > ${prefix}_2.mapped.bam + samtools view -@ $task.cpus -hF 4 -f 0x40 -b output.bam | samtools sort -@ $task.cpus > ${prefix}_1.mapped.bam + samtools view -@ $task.cpus -hF 4 -f 0x80 -b output.bam | samtools sort -@ $task.cpus > ${prefix}_2.mapped.bam + + samtools index -@ $task.cpus ${prefix}_1.mapped.bam + samtools index -@ $task.cpus ${prefix}_2.mapped.bam cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/yara/mapper/meta.yml b/modules/yara/mapper/meta.yml index 60089474..188e1d52 100644 --- a/modules/yara/mapper/meta.yml +++ b/modules/yara/mapper/meta.yml @@ -45,6 +45,10 @@ output: type: file description: Sorted BAM file pattern: "*.{bam}" + - bai: + type: file + description: Sorted BAM file index + pattern: "*.{bai}" authors: - "@apeltzer" diff --git a/tests/modules/yara/mapper/test.yml b/tests/modules/yara/mapper/test.yml index 186f70b4..3bfddc5b 100644 --- a/tests/modules/yara/mapper/test.yml +++ b/tests/modules/yara/mapper/test.yml @@ -5,6 +5,7 @@ - yara files: - path: output/yara/test.mapped.bam + - path: output/yara/test.mapped.bam.bai - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - path: output/yara/yara/yara.lf.drs @@ -39,7 +40,9 @@ - yara files: - path: output/yara/test_2.mapped.bam + - path: output/yara/test_2.mapped.bam.bai - path: output/yara/test_1.mapped.bam + - path: output/yara/test_1.mapped.bam.bai - path: output/yara/yara/yara.txt.size md5sum: 063987b3c3f747be7d2b8043c9d91000 - path: output/yara/yara/yara.lf.drs From de0d57a5623ecb81d1bbc7ad73b5a8754b903d4c Mon Sep 17 00:00:00 2001 From: Benjamin Wingfield Date: Mon, 7 Mar 2022 18:02:40 +0000 Subject: [PATCH 026/283] implement plink2/score module (#1259) * implement plink2/score module * fix test yml * fix typo :( * set cpu * set mem * fix input process input block * fix tests Co-authored-by: Sateesh <33637490+sateeshperi@users.noreply.github.com> --- modules/plink2/score/main.nf | 39 +++++++++++++++ modules/plink2/score/meta.yml | 56 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 3 +- tests/modules/plink2/score/main.nf | 24 ++++++++++ tests/modules/plink2/score/nextflow.config | 15 ++++++ tests/modules/plink2/score/test.yml | 16 +++++++ 7 files changed, 156 insertions(+), 1 deletion(-) create mode 100644 modules/plink2/score/main.nf create mode 100644 modules/plink2/score/meta.yml create 
mode 100644 tests/modules/plink2/score/main.nf create mode 100644 tests/modules/plink2/score/nextflow.config create mode 100644 tests/modules/plink2/score/test.yml diff --git a/modules/plink2/score/main.nf b/modules/plink2/score/main.nf new file mode 100644 index 00000000..6f561322 --- /dev/null +++ b/modules/plink2/score/main.nf @@ -0,0 +1,39 @@ +process PLINK2_SCORE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::plink2=2.00a2.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/plink2:2.00a2.3--h712d239_1' : + 'quay.io/biocontainers/plink2:2.00a2.3--h712d239_1' }" + + input: + tuple val(meta), path(pgen), path(psam), path(pvar) + path(scorefile) + + output: + tuple val(meta), path("*.sscore"), emit: score + path("versions.yml") , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def mem_mb = task.memory.toMega() // plink is greedy + """ + plink2 \\ + --threads $task.cpus \\ + --memory $mem_mb \\ + --pfile ${pgen.baseName} vzs \\ + --score ${scorefile} \\ + $args \\ + --out ${prefix} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + plink2: \$(plink2 --version 2>&1 | sed 's/^PLINK v//; s/ 64.*\$//' ) + END_VERSIONS + """ +} diff --git a/modules/plink2/score/meta.yml b/modules/plink2/score/meta.yml new file mode 100644 index 00000000..5dad6259 --- /dev/null +++ b/modules/plink2/score/meta.yml @@ -0,0 +1,56 @@ +name: plink2_score +description: Apply a scoring system to each sample in a plink 2 fileset +keywords: + - plink2 + - score +tools: + - plink2: + description: | + Whole genome association analysis toolset, designed to perform a range + of basic, large-scale analyses in a computationally efficient manner + homepage: http://www.cog-genomics.org/plink/2.0/ + documentation: http://www.cog-genomics.org/plink/2.0/general_usage + tool_dev_url: None + doi: "10.1186/s13742-015-0047-8" + licence: ['GPL v3'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - pgen: + type: file + description: PLINK 2 binary genotype table + pattern: "*.{pgen}" + - psam: + type: file + description: PLINK 2 sample information file + pattern: "*.{psam}" + - pvar: + type: file + description: PLINK 2 variant information file + pattern: "*.{pvar}" + - scorefile: + type: file + description: A text file containing variant identifiers and weights + pattern: "*.{scores,txt,scorefile}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - score: + type: file + description: A text file containing sample scores, in plink 2 .sscore format + pattern: "*.{sscore}" + +authors: + - "@nebfield" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 553128de..d6575ff1 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1273,6 +1273,10 @@ plink2/extract: - modules/plink2/extract/** - tests/modules/plink2/extract/** +plink2/score: + - modules/plink2/score/** + - tests/modules/plink2/score/** + plink2/vcf: - modules/plink2/vcf/** - tests/modules/plink2/vcf/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index dda10192..ce4f7ae8 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -119,7 +119,7 @@ params { genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" - genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz" + genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz" genome_21_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta" genome_21_fasta_fai = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.fasta.fai" genome_21_dict = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/genome.dict" @@ -138,6 +138,7 @@ params { mills_and_1000g_indels_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/mills_and_1000G.indels.vcf.gz.tbi" syntheticvcf_short_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz" syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" + syntheticvcf_short_score = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.score" gnomad_r2_1_1_sv_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1-sv.vcf.gz" hapmap_3_3_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz" diff --git a/tests/modules/plink2/score/main.nf b/tests/modules/plink2/score/main.nf new file mode 100644 index 00000000..6a09e829 --- /dev/null +++ b/tests/modules/plink2/score/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PLINK2_VCF } from '../../../../modules/plink2/vcf/main.nf' +include { PLINK2_SCORE } from '../../../../modules/plink2/score/main.nf' + +workflow test_plink2_score { + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_vcf_gz'], checkIfExists: true) + ] + PLINK2_VCF ( input ) + + scorefile = file(params.test_data['homo_sapiens']['genome']['syntheticvcf_short_score'], checkIfExists: true) + + PLINK2_VCF.out.pgen + .concat(PLINK2_VCF.out.psam, PLINK2_VCF.out.pvar) + .groupTuple() + .map { it.flatten() } + .set { ch_target_genome } + + PLINK2_SCORE ( ch_target_genome, scorefile ) +} diff --git a/tests/modules/plink2/score/nextflow.config b/tests/modules/plink2/score/nextflow.config new file mode 100644 index 00000000..083e4666 --- /dev/null +++ b/tests/modules/plink2/score/nextflow.config @@ -0,0 +1,15 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + // relabel input variants to a common scheme chr:pos:alt:ref + withName: PLINK2_VCF { + ext.args = '--set-missing-var-ids @:#:\\$1:\\$2' + } + + // scoring really needs an adjustment for small test dataset (n > 50 + // normally) + withName: PLINK2_SCORE { + ext.args = 'no-mean-imputation' + } +} diff --git a/tests/modules/plink2/score/test.yml b/tests/modules/plink2/score/test.yml new file mode 100644 index 00000000..7993cb34 --- /dev/null +++ b/tests/modules/plink2/score/test.yml @@ -0,0 +1,16 @@ +- name: plink2 score test_plink2_score + command: nextflow run tests/modules/plink2/score -entry test_plink2_score -c tests/config/nextflow.config + tags: + - plink2 + - plink2/score + files: + - path: output/plink2/test.pgen + md5sum: fac12ca9041d6950f6b7d60ac2120721 + - path: output/plink2/test.psam + md5sum: e6c714488754cb8448c3dfda08c4c0ea + - path: output/plink2/test.pvar.zst + md5sum: 98d59e9779a8b62d5032cd98b642a63b + - path: output/plink2/test.sscore + md5sum: 97bde840f69febd65f2c00e9243126e9 + - path: output/plink2/versions.yml + md5sum: 71499ab14e1583c88ced3a7a4f05bfa7 From b78a4a456762a4c59fd5023e70f36a27f76d4a97 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 8 Mar 2022 15:56:23 +0100 Subject: [PATCH 027/283] Fix for Maxbin2 emitting input files (#1376) * fix: remove left-over unnecessary code * Fix accidently emitting input * Fix tests --- modules/maxbin2/main.nf | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/maxbin2/main.nf b/modules/maxbin2/main.nf index 7b818d00..a48df43f 100644 --- a/modules/maxbin2/main.nf +++ b/modules/maxbin2/main.nf @@ -29,8 +29,9 @@ process MAXBIN2 { def prefix = task.ext.prefix ?: "${meta.id}" def associate_files = reads ? "-reads $reads" : "-abund $abund" """ + mkdir input/ && mv $contigs input/ run_MaxBin.pl \\ - -contig $contigs \\ + -contig input/$contigs \\ $associate_files \\ -thread $task.cpus \\ $args \\ From e79bcd7d4e517b72045924c16bb778a2f074cf88 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Wed, 9 Mar 2022 14:55:31 +0100 Subject: [PATCH 028/283] Add git aware option to pytest commands (#1379) --- .github/PULL_REQUEST_TEMPLATE.md | 6 +++--- .github/workflows/pytest-workflow.yml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index b9f7a4e8..cfe07f88 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -27,6 +27,6 @@ Closes #XXX - [ ] Add a resource `label` - [ ] Use BioConda and BioContainers if possible to fulfil software requirements. - Ensure that the test works with either Docker / Singularity. 
Conda CI tests can be quite flaky: - - [ ] `PROFILE=docker pytest --tag --symlink --keep-workflow-wd` - - [ ] `PROFILE=singularity pytest --tag --symlink --keep-workflow-wd` - - [ ] `PROFILE=conda pytest --tag --symlink --keep-workflow-wd` + - [ ] `PROFILE=docker pytest --tag --symlink --keep-workflow-wd --git-aware` + - [ ] `PROFILE=singularity pytest --tag --symlink --keep-workflow-wd --git-aware` + - [ ] `PROFILE=conda pytest --tag --symlink --keep-workflow-wd --git-aware` diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index ee922c45..b2be6aa3 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -86,7 +86,7 @@ jobs: # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. - run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof + run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware - name: Output log on failure if: failure() From b82d7abe7089a4b411e326c9e129faf03ba45741 Mon Sep 17 00:00:00 2001 From: "Thomas A. Christensen II" <25492070+MillironX@users.noreply.github.com> Date: Wed, 9 Mar 2022 10:56:35 -0600 Subject: [PATCH 029/283] Decrease indent in seqtk/seq versions.yml output (#1384) Signed-off-by: Thomas A. Christensen II <25492070+MillironX@users.noreply.github.com> Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/seqtk/seq/main.nf | 4 ++-- tests/modules/seqtk/seq/test.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/seqtk/seq/main.nf b/modules/seqtk/seq/main.nf index 1fb03003..d1944ef9 100644 --- a/modules/seqtk/seq/main.nf +++ b/modules/seqtk/seq/main.nf @@ -33,8 +33,8 @@ process SEQTK_SEQ { gzip -c > ${prefix}.seqtk-seq.${extension}.gz cat <<-END_VERSIONS > versions.yml - "${task.process}": - seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + "${task.process}": + seqtk: \$(echo \$(seqtk 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ } diff --git a/tests/modules/seqtk/seq/test.yml b/tests/modules/seqtk/seq/test.yml index 3e4532c6..c6937364 100644 --- a/tests/modules/seqtk/seq/test.yml +++ b/tests/modules/seqtk/seq/test.yml @@ -7,7 +7,7 @@ - path: output/seqtk/test.seqtk-seq.fasta.gz md5sum: 50d73992c8c7e56dc095ef47ec52a754 - path: output/seqtk/versions.yml - md5sum: 2b89cd4a6e28f35fcfbbd2188384f944 + md5sum: 6555e1061080c44f828de0b40b299e41 - name: seqtk seq test_seqtk_seq_fq command: nextflow run tests/modules/seqtk/seq -entry test_seqtk_seq_fq -c tests/config/nextflow.config @@ -18,4 +18,4 @@ - path: output/seqtk/test.seqtk-seq.fasta.gz md5sum: 2f009f1647971a97b4edec726a99dc1a - path: output/seqtk/versions.yml - md5sum: 3467a76d3540bee8f58de050512bddaa + md5sum: feb70feb3165d5c19fa50c16e46e6772 From 24f0bdd14ec32e0114aa6ee5337ddbd490ffd570 Mon Sep 17 00:00:00 2001 From: Michael J Cipriano <42848032+mjcipriano@users.noreply.github.com> Date: Wed, 9 Mar 2022 12:36:05 -0500 Subject: [PATCH 030/283] added module seqkit replace (#1382) * added module seqkit replace * added when * removed extra line * Update modules/seqkit/replace/main.nf Co-authored-by: Robert A. Petit III * Updated meta * updated indents Co-authored-by: Cipriano Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Robert A. 
Petit III --- modules/seqkit/replace/main.nf | 41 ++++++++++++++++++++ modules/seqkit/replace/meta.yml | 41 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/seqkit/replace/main.nf | 24 ++++++++++++ tests/modules/seqkit/replace/nextflow.config | 14 +++++++ tests/modules/seqkit/replace/test.yml | 21 ++++++++++ 6 files changed, 145 insertions(+) create mode 100644 modules/seqkit/replace/main.nf create mode 100644 modules/seqkit/replace/meta.yml create mode 100644 tests/modules/seqkit/replace/main.nf create mode 100644 tests/modules/seqkit/replace/nextflow.config create mode 100644 tests/modules/seqkit/replace/test.yml diff --git a/modules/seqkit/replace/main.nf b/modules/seqkit/replace/main.nf new file mode 100644 index 00000000..db189ef6 --- /dev/null +++ b/modules/seqkit/replace/main.nf @@ -0,0 +1,41 @@ +process SEQKIT_REPLACE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::seqkit=2.1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqkit:2.1.0--h9ee0642_0': + 'quay.io/biocontainers/seqkit:2.1.0--h9ee0642_0' }" + + input: + tuple val(meta), path(fastx) + + output: + tuple val(meta), path("*.fast*"), emit: fastx + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def extension = "fastq" + if ("$fastx" ==~ /.+\.fasta|.+\.fasta.gz|.+\.fa|.+\.fa.gz|.+\.fas|.+\.fas.gz|.+\.fna|.+\.fna.gz/) { + extension = "fasta" + } + def endswith = task.ext.suffix ?: "${extension}.gz" + """ + seqkit \\ + replace \\ + ${args} \\ + --threads ${task.cpus} \\ + -i ${fastx} \\ + -o ${prefix}.${endswith} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + seqkit: \$( seqkit | sed '3!d; s/Version: //' ) + END_VERSIONS + """ +} diff --git a/modules/seqkit/replace/meta.yml b/modules/seqkit/replace/meta.yml new file mode 100644 index 00000000..c15d04cb --- /dev/null +++ b/modules/seqkit/replace/meta.yml @@ -0,0 +1,41 @@ +name: seqkit_replace +description: Use seqkit to find/replace strings within sequences and sequence headers +keywords: + - seqkit + - replace +tools: + - seqkit: + description: Cross-platform and ultrafast toolkit for FASTA/Q file manipulation, written by Wei Shen. + homepage: https://bioinf.shenwei.me/seqkit/usage/ + documentation: https://bioinf.shenwei.me/seqkit/usage/ + tool_dev_url: https://github.com/shenwei356/seqkit/ + doi: "10.1371/journal.pone.016396" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fastx: + type: file + description: fasta/q file + pattern: "*.{fasta,fastq,fa,fq,fas,fna,faa}*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fastx: + type: file + description: fasta/q file with replaced values + pattern: "*.{fasta,fastq,fa,fq,fas,fna,faa}*" + +authors: + - "@mjcipriano" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d6575ff1..a370f371 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1481,6 +1481,10 @@ seqkit/pair: - modules/seqkit/pair/** - tests/modules/seqkit/pair/** +seqkit/replace: + - modules/seqkit/replace/** + - tests/modules/seqkit/replace/** + seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** diff --git a/tests/modules/seqkit/replace/main.nf b/tests/modules/seqkit/replace/main.nf new file mode 100644 index 00000000..5c4058e7 --- /dev/null +++ b/tests/modules/seqkit/replace/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SEQKIT_REPLACE } from '../../../../modules/seqkit/replace/main.nf' +include { SEQKIT_REPLACE as SEQKIT_REPLACEUNCOMP } from '../../../../modules/seqkit/replace/main.nf' + +workflow test_seqkit_replace { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + ] + + SEQKIT_REPLACE ( input ) +} + +workflow test_seqkit_replace_uncomp { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + ] + + SEQKIT_REPLACEUNCOMP ( input ) +} diff --git a/tests/modules/seqkit/replace/nextflow.config b/tests/modules/seqkit/replace/nextflow.config new file mode 100644 index 00000000..8cec8505 --- /dev/null +++ b/tests/modules/seqkit/replace/nextflow.config @@ -0,0 +1,14 @@ +process { + + withName: 'SEQKIT_REPLACE' { + ext.args = "-s -p 'A' -r 'N'" + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + } + + withName: 'SEQKIT_REPLACEUNCOMP' { + ext.args = "-s -p 'T' -r 'N'" + ext.suffix = ".fasta" + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + } + +} diff --git a/tests/modules/seqkit/replace/test.yml b/tests/modules/seqkit/replace/test.yml new file mode 100644 index 00000000..94c3a5ef --- /dev/null +++ b/tests/modules/seqkit/replace/test.yml @@ -0,0 +1,21 @@ +- name: seqkit replace test_seqkit_replace + command: nextflow run tests/modules/seqkit/replace -entry test_seqkit_replace -c tests/config/nextflow.config + tags: + - seqkit + - seqkit/replace + files: + - path: output/seqkit/test.fasta.gz + md5sum: 053847219695c0a923d02352442d7abf + - path: output/seqkit/versions.yml + md5sum: dc9d18b7836c9db00a3032fd191bd831 + +- name: seqkit replace test_seqkit_replace_uncomp + command: nextflow run tests/modules/seqkit/replace -entry test_seqkit_replace_uncomp -c tests/config/nextflow.config + tags: + - seqkit + - seqkit/replace + files: + - path: output/seqkit/test..fasta + md5sum: 05d3294a62c72f5489f067c1da3c2f6c + - path: output/seqkit/versions.yml + md5sum: 3b88128487ec949f0bdeecebc375c407 From 62da45b0e1202677a07e2da0ee9f6181466232fb Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 10 Mar 2022 09:23:45 +0100 Subject: [PATCH 031/283] Bump chromap version 0.2.0 (#1374) * Bump chromap version 0.2.0 * Temporary use the docker container until singularity container becomes available * Temporary use the docker container until singularity container available * Remove empty 
lines * Update singularity container after became available --- modules/chromap/chromap/main.nf | 6 +++--- modules/chromap/index/main.nf | 7 +++---- tests/modules/chromap/chromap/test.yml | 8 +++++++- tests/modules/chromap/index/test.yml | 2 ++ 4 files changed, 15 insertions(+), 8 deletions(-) diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index cdbf6049..4ee86b92 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -2,10 +2,10 @@ process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::chromap=0.1.5 bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::chromap=0.2.0 bioconda::samtools=1.14" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:724a1037d59f6a19c9d4e7bdba77b52b37de0dc3-0' : - 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:724a1037d59f6a19c9d4e7bdba77b52b37de0dc3-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:ed3529ef5253d7ccbc688b6a4c5c447152685757-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:ed3529ef5253d7ccbc688b6a4c5c447152685757-0' }" input: tuple val(meta), path(reads) diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index 52deaf06..2696d6a5 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -2,11 +2,10 @@ process CHROMAP_INDEX { tag '$fasta' label 'process_medium' - conda (params.enable_conda ? "bioconda::chromap=0.1.5" : null) + conda (params.enable_conda ? "bioconda::chromap=0.2.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/chromap:0.1.5--h9a82719_0' : - 'quay.io/biocontainers/chromap:0.1.5--h9a82719_0' }" - + 'https://depot.galaxyproject.org/singularity/chromap:0.2.0--hd03093a_1' : + 'quay.io/biocontainers/chromap:0.2.0--hd03093a_1' }" input: path fasta diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index 09e5e753..40e45959 100644 --- a/tests/modules/chromap/chromap/test.yml +++ b/tests/modules/chromap/chromap/test.yml @@ -7,6 +7,8 @@ - path: output/chromap/genome.index - path: output/chromap/test.bed.gz md5sum: 25e40bde24c7b447292cd68573728694 + - path: output/chromap/versions.yml + md5sum: 2d3d2959ac20d98036807964896829e7 - name: chromap chromap test_chromap_chromap_paired_end command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -17,6 +19,8 @@ - path: output/chromap/genome.index - path: output/chromap/test.bed.gz md5sum: 7cdc8448882b75811e0c784f5f20aef2 + - path: output/chromap/versions.yml + md5sum: 51cff66779161d8a602cce5989017395 - name: chromap chromap test_chromap_chromap_paired_bam command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -26,4 +30,6 @@ files: - path: output/chromap/genome.index - path: output/chromap/test.bam - md5sum: 73e2c76007e3c61df625668e01b3f42f + md5sum: f255c7441d5a1f307fc642d2aa19647e + - path: output/chromap/versions.yml + md5sum: f91910c44169549c3923931de5c3afcb diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml index dde1aa1b..b2aa37d8 100644 --- a/tests/modules/chromap/index/test.yml +++ b/tests/modules/chromap/index/test.yml @@ -5,3 +5,5 @@ - chromap files: - path: output/chromap/genome.index + - path: output/chromap/versions.yml + md5sum: b75dec647f9dc5f4887f36d1db7a9ccd From 79a9d5e1eae6cc6f3b2a0a0d02e61ae4b5ef3748 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Fri, 11 Mar 2022 09:02:10 +0000 Subject: [PATCH 032/283] New module: NGSCheckMate (#1290) NGSCheckMate ncm mode, working on bam files and vcf files to check that (human) samples match as expected Co-authored-by: Simon Pearce Co-authored-by: Mahesh Binzer-Panchal --- modules/ngscheckmate/ncm/main.nf | 49 ++++++++++++++ modules/ngscheckmate/ncm/meta.yml | 64 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/ngscheckmate/ncm/main.nf | 63 ++++++++++++++++++ .../modules/ngscheckmate/ncm/nextflow.config | 27 ++++++++ tests/modules/ngscheckmate/ncm/test.yml | 29 +++++++++ 6 files changed, 236 insertions(+) create mode 100644 modules/ngscheckmate/ncm/main.nf create mode 100644 modules/ngscheckmate/ncm/meta.yml create mode 100644 tests/modules/ngscheckmate/ncm/main.nf create mode 100644 tests/modules/ngscheckmate/ncm/nextflow.config create mode 100644 tests/modules/ngscheckmate/ncm/test.yml diff --git a/modules/ngscheckmate/ncm/main.nf b/modules/ngscheckmate/ncm/main.nf new file mode 100644 index 00000000..2712c984 --- /dev/null +++ b/modules/ngscheckmate/ncm/main.nf @@ -0,0 +1,49 @@ +process NGSCHECKMATE_NCM { + label 'process_low' + + conda (params.enable_conda ? "bioconda::ngscheckmate=1.0.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/ngscheckmate:1.0.0--py27r41hdfd78af_3': + 'quay.io/biocontainers/ngscheckmate:1.0.0--py27r41hdfd78af_3' }" + + input: + path files + path snp_bed + path fasta + + output: + path "*.pdf" , emit: pdf + path "*_corr_matrix.txt", emit: corr_matrix + path "*_matched.txt" , emit: matched + path "*_all.txt" , emit: all + path "*.vcf" , emit: vcfs, optional: true + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "output" + def unzip = files.any { it.toString().endsWith(".vcf.gz") } + """ + if $unzip + then + for VCFGZ in *.vcf.gz; do + gunzip -cdf \$VCFGZ > \$( basename \$VCFGZ .gz ); + done + fi + + NCM_REF="./"${fasta} ncm.py -d . -bed ${snp_bed} -O . -N ${prefix} $args + + if $unzip + then + rm -f *.vcf # clean up decompressed vcfs + fi + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ngscheckmate: \$(ncm.py --help | sed "7!d;s/ *Ensuring Sample Identity v//g") + END_VERSIONS + """ +} diff --git a/modules/ngscheckmate/ncm/meta.yml b/modules/ngscheckmate/ncm/meta.yml new file mode 100644 index 00000000..b8837b80 --- /dev/null +++ b/modules/ngscheckmate/ncm/meta.yml @@ -0,0 +1,64 @@ +name: ngscheckmate_ncm +description: Determining whether sequencing data comes from the same individual by using SNP matching. Designed for humans on vcf or bam files. +keywords: + - ngscheckmate + - matching + - snp +tools: + - ngscheckmate: + description: NGSCheckMate is a software package for identifying next generation sequencing (NGS) data files from the same individual, including matching between DNA and RNA. + homepage: https://github.com/parklab/NGSCheckMate + documentation: https://github.com/parklab/NGSCheckMate + tool_dev_url: https://github.com/parklab/NGSCheckMate + doi: "doi:/10.1093/nar/gkx193" + licence: ['MIT'] + +input: + - files: + type: file + description: VCF or BAM files for each sample, in a merged channel (possibly gzipped). BAM files require an index too. 
+ pattern: "*.{vcf,vcf.gz,bam,bai}" + + - snp_bed: + type: file + description: BED file containing the SNPs to analyse + pattern: "*.{bed}" + + - fasta: + type: file + description: fasta file for the genome, only used in the bam mode + pattern: "*.{bed}" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + - pdf: + type: file + description: A pdf containing a dendrogram showing how the samples match up + pattern: "*.{pdf}" + + - corr_matrix: + type: file + description: A text file containing the correlation matrix between each sample + pattern: "*corr_matrix.txt" + + - matched: + type: file + description: A txt file containing only the samples that match with each other + pattern: "*matched.txt" + + - all: + type: file + description: A txt file containing all the sample comparisons, whether they match or not + pattern: "*all.txt" + + - vcfs: + type: file + description: If ran in bam mode, vcf files for each sample giving the SNP calls + pattern: "*.vcf" + +authors: + - "@sppearce" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a370f371..5c110ba7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1145,6 +1145,10 @@ ngmaster: - modules/ngmaster/** - tests/modules/ngmaster/** +ngscheckmate/ncm: + - modules/ngscheckmate/ncm/** + - tests/modules/ngscheckmate/ncm/** + nucmer: - modules/nucmer/** - tests/modules/nucmer/** diff --git a/tests/modules/ngscheckmate/ncm/main.nf b/tests/modules/ngscheckmate/ncm/main.nf new file mode 100644 index 00000000..ab6a4639 --- /dev/null +++ b/tests/modules/ngscheckmate/ncm/main.nf @@ -0,0 +1,63 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { NGSCHECKMATE_NCM as NGSCHECKMATE_NCM_BAM} from '../../../../modules/ngscheckmate/ncm/main.nf' +include { NGSCHECKMATE_NCM as NGSCHECKMATE_NCM_VCF} from '../../../../modules/ngscheckmate/ncm/main.nf' + +include { BEDTOOLS_MAKEWINDOWS } from '../../../../modules/bedtools/makewindows/main.nf' + +include { BCFTOOLS_MPILEUP } from '../../../../modules/bcftools/mpileup/main.nf' +include { BCFTOOLS_MPILEUP as BCFTOOLS_MPILEUP2 } from '../../../../modules/bcftools/mpileup/main.nf' + +workflow test_ngscheckmate_ncm_bam { + input = [ file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + + fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + inputBed = [ [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + BEDTOOLS_MAKEWINDOWS(inputBed, true). + tab. + map{it[1]}. + view(). 
+ set{snp_channel} + + NGSCHECKMATE_NCM_BAM(input, snp_channel, fasta) +} + +workflow test_ngscheckmate_ncm_vcf { + input1 = [ [ id:'test1' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + + input2 = [ [ id:'test2' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + ] + + fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + inputBed = [ [ id:'test'], + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)] + + BCFTOOLS_MPILEUP ( input1, fasta, false ) + BCFTOOLS_MPILEUP2 ( input2, fasta, false ) + + BCFTOOLS_MPILEUP2.out.vcf. + combine( BCFTOOLS_MPILEUP.out.vcf ). + map { [ it[1], it[3] ] }. + set { vcf_channel } + + BEDTOOLS_MAKEWINDOWS( inputBed, true ).tab. + map { it[1] }. + view(). + set { snp_channel } + + NGSCHECKMATE_NCM_VCF(vcf_channel, snp_channel, fasta) +} + + diff --git a/tests/modules/ngscheckmate/ncm/nextflow.config b/tests/modules/ngscheckmate/ncm/nextflow.config new file mode 100644 index 00000000..81698ecd --- /dev/null +++ b/tests/modules/ngscheckmate/ncm/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: BEDTOOLS_MAKEWINDOWS { + ext.args = '-w 1' + } + + withName: BCFTOOLS_MPILEUP { + ext.args2 = '--no-version --ploidy 1 --multiallelic-caller' + ext.args3 = '--no-version' + } + + withName: BCFTOOLS_MPILEUP2 { + ext.args2 = '--no-version --ploidy 1 --multiallelic-caller' + ext.args3 = '--no-version' + } + + withName: NGSCHECKMATE_NCM_VCF { + ext.args = '-V' + } + + withName: NGSCHECKMATE_NCM_BAM { + ext.args = '-B' + } + +} \ No newline at end of file diff --git a/tests/modules/ngscheckmate/ncm/test.yml b/tests/modules/ngscheckmate/ncm/test.yml new file mode 100644 index 00000000..c0c8bc00 --- /dev/null +++ b/tests/modules/ngscheckmate/ncm/test.yml @@ -0,0 +1,29 @@ +- name: ngscheckmate ncm test_ngscheckmate_ncm_bam + command: nextflow run tests/modules/ngscheckmate/ncm -entry test_ngscheckmate_ncm_bam -c tests/config/nextflow.config + tags: + - ngscheckmate/ncm + - ngscheckmate + files: + - path: output/ngscheckmate/output_all.txt + md5sum: f71a712c3f6ecf64dd526365212f1b7c + - path: output/ngscheckmate/output_corr_matrix.txt + md5sum: 6777377aa9ae3d57f841b12896318db0 + - path: output/ngscheckmate/output_matched.txt + md5sum: f71a712c3f6ecf64dd526365212f1b7c + - path: output/ngscheckmate/versions.yml + md5sum: fbb2bebd65b4f4e1e93c6bf5c08a6829 + +- name: ngscheckmate ncm test_ngscheckmate_ncm_vcf + command: nextflow run tests/modules/ngscheckmate/ncm -entry test_ngscheckmate_ncm_vcf -c tests/config/nextflow.config + tags: + - ngscheckmate/ncm + - ngscheckmate + files: + - path: output/ngscheckmate/output_all.txt + md5sum: fd74956dcac279b6f58e82ea73e344f8 + - path: output/ngscheckmate/output_corr_matrix.txt + md5sum: 0c86bdad2721c470fe6be119f291c8e5 + - path: output/ngscheckmate/output_matched.txt + md5sum: fd74956dcac279b6f58e82ea73e344f8 + - path: output/ngscheckmate/versions.yml + md5sum: f06910b83dde194a47870c553cefe193 From 625098a408b1e8c2d91b82b4d5a88197bb6f4d5f Mon Sep 17 00:00:00 2001 From: "James A. 
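As a pointer for BAM mode, a sketch of how a pipeline might assemble the flat file list that NGSCHECKMATE_NCM expects; the upstream channel shape and names here are assumptions for illustration only:

    ch_bam_bai                                        // assumed shape: [ meta, bam, bai ] per sample
        .map { meta, bam, bai -> [ bam, bai ] }       // the module takes bare paths, no meta map
        .collect()                                    // one flat list holding every BAM plus its index
        .set { ch_ncm_files }

    NGSCHECKMATE_NCM ( ch_ncm_files, ch_snp_bed, ch_fasta )
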
Fellows Yates" Date: Mon, 14 Mar 2022 07:32:21 +0100 Subject: [PATCH 033/283] Adapterremoval glob fix (#1391) * fix: remove left-over unnecessary code * Partial fix for AR module output declarations * Remove `def` for prefix so useable in output block * Fix tests * Add adapterlist support * Fix tests after addition of adapter list --- modules/adapterremoval/main.nf | 46 +++++++------------- modules/adapterremoval/meta.yml | 7 +++ tests/modules/adapterremoval/main.nf | 12 ++--- tests/modules/adapterremoval/nextflow.config | 4 ++ tests/modules/adapterremoval/test.yml | 13 +++--- 5 files changed, 41 insertions(+), 41 deletions(-) diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 77838287..9d16b9c9 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -9,48 +9,34 @@ process ADAPTERREMOVAL { input: tuple val(meta), path(reads) + path(adapterlist) output: - tuple val(meta), path('*.truncated.gz') , optional: true, emit: singles_truncated - tuple val(meta), path('*.discarded.gz') , optional: true, emit: discarded - tuple val(meta), path('*.pair1.truncated.gz') , optional: true, emit: pair1_truncated - tuple val(meta), path('*.pair2.truncated.gz') , optional: true, emit: pair2_truncated - tuple val(meta), path('*.collapsed.gz') , optional: true, emit: collapsed - tuple val(meta), path('*.collapsed.truncated') , optional: true, emit: collapsed_truncated - tuple val(meta), path('*paired.gz') , optional: true, emit: paired_interleaved - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}.truncated.gz") , optional: true, emit: singles_truncated + tuple val(meta), path("${prefix}.discarded.gz") , optional: true, emit: discarded + tuple val(meta), path("${prefix}.pair1.truncated.gz") , optional: true, emit: pair1_truncated + tuple val(meta), path("${prefix}.pair2.truncated.gz") , optional: true, emit: pair2_truncated + tuple val(meta), path("${prefix}.collapsed.gz") , optional: true, emit: collapsed + tuple val(meta), path("${prefix}.collapsed.truncated.gz") , optional: true, emit: collapsed_truncated + tuple val(meta), path("${prefix}.paired.gz") , optional: true, emit: paired_interleaved + tuple val(meta), path('*.log') , emit: log + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" + def list = adapterlist ? "--adapter-list ${adapterlist}" : "" + prefix = task.ext.prefix ?: "${meta.id}" if (meta.single_end) { """ AdapterRemoval \\ --file1 $reads \\ $args \\ - --basename $prefix \\ - --threads ${task.cpus} \\ - --settings ${prefix}.log \\ - --seed 42 \\ - --gzip - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") - END_VERSIONS - """ - } else if (!meta.single_end ) { - """ - AdapterRemoval \\ - --file1 ${reads[0]} \\ - --file2 ${reads[1]} \\ - $args \\ - --basename $prefix \\ + $adapterlist \\ + --basename ${prefix} \\ --threads ${task.cpus} \\ --settings ${prefix}.log \\ --seed 42 \\ @@ -67,13 +53,13 @@ process ADAPTERREMOVAL { --file1 ${reads[0]} \\ --file2 ${reads[1]} \\ $args \\ - --basename $prefix \\ + $adapterlist \\ + --basename ${prefix} \\ --threads $task.cpus \\ --settings ${prefix}.log \\ --seed 42 \\ --gzip - cat *.collapsed.gz *.collapsed.truncated.gz > ${prefix}.merged.fastq.gz cat <<-END_VERSIONS > versions.yml "${task.process}": adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index a9a071f5..e395fe4a 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -24,6 +24,13 @@ input: List of input FastQ files of size 1 and 2 for single-end and paired-end data, respectively. pattern: "*.{fq,fastq,fq.gz,fastq.gz}" + - adapterlist: + type: file + description: + Optional text file containing list of adapters to look for for removal + with one adapter per line. Otherwise will look for default adapters (see + AdapterRemoval man page), or can be modified to remove user-specified + adapters via ext.args. output: - meta: diff --git a/tests/modules/adapterremoval/main.nf b/tests/modules/adapterremoval/main.nf index ee7f1c44..a427bfbf 100644 --- a/tests/modules/adapterremoval/main.nf +++ b/tests/modules/adapterremoval/main.nf @@ -2,14 +2,16 @@ nextflow.enable.dsl = 2 -include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' +include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' +include { ADAPTERREMOVAL as ADAPTERREMOVAL_COLLAPSE } from '../../../modules/adapterremoval/main.nf' + workflow test_adapterremoval_single_end { input = [ [ id:'test', single_end:true, collapse:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - ADAPTERREMOVAL ( input ) + ADAPTERREMOVAL ( input, [] ) } workflow test_adapterremoval_paired_end { @@ -18,15 +20,15 @@ workflow test_adapterremoval_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - ADAPTERREMOVAL ( input ) + ADAPTERREMOVAL ( input, [] ) } workflow test_adapterremoval_paired_end_collapse { - input = [ [ id:'test', single_end:false, collapse:true ], // meta map + input = [ [ id:'test', single_end:false ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - ADAPTERREMOVAL ( input ) + ADAPTERREMOVAL_COLLAPSE ( input, [] ) } diff --git a/tests/modules/adapterremoval/nextflow.config b/tests/modules/adapterremoval/nextflow.config index 8730f1c4..b59870ef 100644 --- a/tests/modules/adapterremoval/nextflow.config +++ b/tests/modules/adapterremoval/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: ADAPTERREMOVAL_COLLAPSE { + ext.args = "--collapse" + } + } diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index 805af9fc..f6adfba3 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -23,7 +23,6 @@ md5sum: 294a6277f0139bd597e57c6fa31f39c7 - path: 
output/adapterremoval/test.pair2.truncated.gz md5sum: de7b38e2c881bced8671acb1ab452d78 - - path: output/adapterremoval/test.singleton.truncated.gz - path: output/adapterremoval/versions.yml md5sum: fa621c887897da5a379c719399c17db7 @@ -32,13 +31,15 @@ tags: - adapterremoval files: + - path: output/adapterremoval/test.collapsed.gz + md5sum: ff956de3532599a56c3efe5369f0953f + - path: output/adapterremoval/test.collapsed.truncated.gz - path: output/adapterremoval/test.discarded.gz - path: output/adapterremoval/test.log - md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 + md5sum: 7f0b2328152226e46101a535cce718b3 - path: output/adapterremoval/test.pair1.truncated.gz - md5sum: 294a6277f0139bd597e57c6fa31f39c7 + md5sum: 683be19bc1c83008944b6b719bfa34e1 - path: output/adapterremoval/test.pair2.truncated.gz - md5sum: de7b38e2c881bced8671acb1ab452d78 - - path: output/adapterremoval/test.singleton.truncated.gz + md5sum: e6548fe061f3ef86368b26da930174d0 - path: output/adapterremoval/versions.yml - md5sum: fd428f92a8446e0b34c5ae1c447215b8 + md5sum: 78f589bb313c8da0147ca8ce77d7f3bf From 801240a9714082de913e8a72f4bb434018a50e8b Mon Sep 17 00:00:00 2001 From: Nathan Spix <56930974+njspix@users.noreply.github.com> Date: Mon, 14 Mar 2022 09:34:22 -0400 Subject: [PATCH 034/283] Add core Biscuit tools (#1354) * create files with nf-core command * update meta.yml files * starting to work on index main.nf * prelim test for index * index test working; not finding all output files * index passing tests * index and align passing tests * prototyping biscuitblaster and pileup * update containers * updates to pileup * pileup passing tests * template creation for more biscuit tools * tests passing on blaster,bsconv,pupsom * epiread passing tests, but need to update SNP bed file path * vcf2bed working; change test file * all biscuit commands passing tests * biscuitblaster rename * try to fix permissions * more permission fixes * trying a couple more permission changes * hopefully last permission fixes * really last permission changes * few more permissions * add when blocks * Remove read group meta Co-authored-by: James A. Fellows Yates * remove read group meta Co-authored-by: James A. 
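Because the reworked AdapterRemoval module above no longer concatenates collapsed and collapsed-truncated reads into one merged FASTQ, a pipeline that still wants that behaviour has to recreate it downstream. One possible sketch, with illustrative channel names:

    ADAPTERREMOVAL_COLLAPSE ( ch_reads, [] )                     // empty list = no user-supplied adapter list
    ADAPTERREMOVAL_COLLAPSE.out.collapsed
        .mix ( ADAPTERREMOVAL_COLLAPSE.out.collapsed_truncated )
        .groupTuple ()                                           // [ meta, [ *.collapsed.gz, *.collapsed.truncated.gz ] ]
        .set { ch_to_merge }                                     // e.g. feed into a CAT_FASTQ-style concatenation step
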
Fellows Yates * changes for first round of review * update meta.yml with more specific links * Update modules/biscuit/biscuitblaster/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Apply new version reporting Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/biscuit/pileup/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update main.nf * Update modules/biscuit/pileupsomatic/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * update test file path * Update modules/biscuit/align/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/biscuit/align/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * tests passing again * Update modules/biscuit/align/main.nf * Update modules/biscuit/bsconv/main.nf * Update modules/biscuit/epiread/main.nf * Update modules/biscuit/index/main.nf * Update test.yml * Update modules/biscuit/pileupsomatic/main.nf * remove module-specific extension/prefix * remove module-specific extension/prefix * add missing args * switch pileup strategy * update test.yml * remove debug * whitespace cleanup * add in newline escapes * requested changes * Update modules/biscuit/pileup/meta.yml Co-authored-by: Spix Co-authored-by: James A. Fellows Yates Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Spix Co-authored-by: njspix --- modules/biscuit/align/main.nf | 44 ++++++++++ modules/biscuit/align/meta.yml | 52 ++++++++++++ modules/biscuit/biscuitblaster/main.nf | 52 ++++++++++++ modules/biscuit/biscuitblaster/meta.yml | 78 ++++++++++++++++++ modules/biscuit/bsconv/main.nf | 39 +++++++++ modules/biscuit/bsconv/meta.yml | 55 +++++++++++++ modules/biscuit/epiread/main.nf | 57 +++++++++++++ modules/biscuit/epiread/meta.yml | 58 ++++++++++++++ modules/biscuit/index/main.nf | 33 ++++++++ modules/biscuit/index/meta.yml | 38 +++++++++ modules/biscuit/mergecg/main.nf | 43 ++++++++++ modules/biscuit/mergecg/meta.yml | 51 ++++++++++++ modules/biscuit/pileup/main.nf | 45 +++++++++++ modules/biscuit/pileup/meta.yml | 70 ++++++++++++++++ modules/biscuit/qc/main.nf | 40 ++++++++++ modules/biscuit/qc/meta.yml | 51 ++++++++++++ modules/biscuit/vcf2bed/main.nf | 39 +++++++++ modules/biscuit/vcf2bed/meta.yml | 48 +++++++++++ tests/config/nextflow.config | 2 +- tests/config/pytest_modules.yml | 43 ++++++++++ tests/modules/biscuit/align/main.nf | 33 ++++++++ tests/modules/biscuit/align/nextflow.config | 5 ++ tests/modules/biscuit/align/test.yml | 53 ++++++++++++ tests/modules/biscuit/biscuitblaster/main.nf | 32 ++++++++ .../biscuit/biscuitblaster/nextflow.config | 5 ++ tests/modules/biscuit/biscuitblaster/test.yml | 57 +++++++++++++ tests/modules/biscuit/bsconv/main.nf | 19 +++++ tests/modules/biscuit/bsconv/nextflow.config | 10 +++ tests/modules/biscuit/bsconv/test.yml | 26 ++++++ tests/modules/biscuit/epiread/main.nf | 48 +++++++++++ tests/modules/biscuit/epiread/nextflow.config | 5 ++ tests/modules/biscuit/epiread/test.yml | 80 +++++++++++++++++++ tests/modules/biscuit/index/main.nf | 12 +++ tests/modules/biscuit/index/nextflow.config | 5 ++ tests/modules/biscuit/index/test.yml | 24 ++++++ tests/modules/biscuit/mergecg/main.nf | 18 +++++ tests/modules/biscuit/mergecg/nextflow.config | 5 ++ tests/modules/biscuit/mergecg/test.yml | 26 ++++++ tests/modules/biscuit/pileup/main.nf | 38 +++++++++ 
tests/modules/biscuit/pileup/nextflow.config | 5 ++ tests/modules/biscuit/pileup/test.yml | 53 ++++++++++++ tests/modules/biscuit/qc/main.nf | 18 +++++ tests/modules/biscuit/qc/nextflow.config | 5 ++ tests/modules/biscuit/qc/test.yml | 38 +++++++++ tests/modules/biscuit/vcf2bed/main.nf | 16 ++++ tests/modules/biscuit/vcf2bed/nextflow.config | 5 ++ tests/modules/biscuit/vcf2bed/test.yml | 10 +++ 47 files changed, 1588 insertions(+), 1 deletion(-) create mode 100644 modules/biscuit/align/main.nf create mode 100644 modules/biscuit/align/meta.yml create mode 100644 modules/biscuit/biscuitblaster/main.nf create mode 100644 modules/biscuit/biscuitblaster/meta.yml create mode 100644 modules/biscuit/bsconv/main.nf create mode 100644 modules/biscuit/bsconv/meta.yml create mode 100644 modules/biscuit/epiread/main.nf create mode 100644 modules/biscuit/epiread/meta.yml create mode 100644 modules/biscuit/index/main.nf create mode 100644 modules/biscuit/index/meta.yml create mode 100644 modules/biscuit/mergecg/main.nf create mode 100644 modules/biscuit/mergecg/meta.yml create mode 100644 modules/biscuit/pileup/main.nf create mode 100644 modules/biscuit/pileup/meta.yml create mode 100644 modules/biscuit/qc/main.nf create mode 100644 modules/biscuit/qc/meta.yml create mode 100644 modules/biscuit/vcf2bed/main.nf create mode 100644 modules/biscuit/vcf2bed/meta.yml create mode 100644 tests/modules/biscuit/align/main.nf create mode 100644 tests/modules/biscuit/align/nextflow.config create mode 100644 tests/modules/biscuit/align/test.yml create mode 100644 tests/modules/biscuit/biscuitblaster/main.nf create mode 100644 tests/modules/biscuit/biscuitblaster/nextflow.config create mode 100644 tests/modules/biscuit/biscuitblaster/test.yml create mode 100644 tests/modules/biscuit/bsconv/main.nf create mode 100644 tests/modules/biscuit/bsconv/nextflow.config create mode 100644 tests/modules/biscuit/bsconv/test.yml create mode 100644 tests/modules/biscuit/epiread/main.nf create mode 100644 tests/modules/biscuit/epiread/nextflow.config create mode 100644 tests/modules/biscuit/epiread/test.yml create mode 100644 tests/modules/biscuit/index/main.nf create mode 100644 tests/modules/biscuit/index/nextflow.config create mode 100644 tests/modules/biscuit/index/test.yml create mode 100644 tests/modules/biscuit/mergecg/main.nf create mode 100644 tests/modules/biscuit/mergecg/nextflow.config create mode 100644 tests/modules/biscuit/mergecg/test.yml create mode 100644 tests/modules/biscuit/pileup/main.nf create mode 100644 tests/modules/biscuit/pileup/nextflow.config create mode 100644 tests/modules/biscuit/pileup/test.yml create mode 100644 tests/modules/biscuit/qc/main.nf create mode 100644 tests/modules/biscuit/qc/nextflow.config create mode 100644 tests/modules/biscuit/qc/test.yml create mode 100644 tests/modules/biscuit/vcf2bed/main.nf create mode 100644 tests/modules/biscuit/vcf2bed/nextflow.config create mode 100644 tests/modules/biscuit/vcf2bed/test.yml diff --git a/modules/biscuit/align/main.nf b/modules/biscuit/align/main.nf new file mode 100644 index 00000000..18e178ff --- /dev/null +++ b/modules/biscuit/align/main.nf @@ -0,0 +1,44 @@ +process BISCUIT_ALIGN { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(reads) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def biscuit_cpus = (int) Math.max(Math.floor(task.cpus*0.9),1) + def samtools_cpus = task.cpus-biscuit_cpus + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit align \\ + $args \\ + -@ $biscuit_cpus \\ + \$INDEX \\ + $reads \\ + | samtools sort $args2 --threads $samtools_cpus -o ${prefix}.bam - + + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + samtools: \$( samtools --version |& sed '1!d; s/^.*samtools //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/align/meta.yml b/modules/biscuit/align/meta.yml new file mode 100644 index 00000000..77af5e4d --- /dev/null +++ b/modules/biscuit/align/meta.yml @@ -0,0 +1,52 @@ +name: biscuit_align +description: Aligns single- or paired-end reads from bisulfite-converted libraries to a reference genome using Biscuit. +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - bam +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/alignment + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input fastq files of size 1 and 2 for single-end and paired-end data, + respectively. + - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Output BAM file containing read alignments + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/biscuitblaster/main.nf b/modules/biscuit/biscuitblaster/main.nf new file mode 100644 index 00000000..700bc5e0 --- /dev/null +++ b/modules/biscuit/biscuitblaster/main.nf @@ -0,0 +1,52 @@ +process BISCUIT_BLASTER { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samblaster=0.1.26 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(reads) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.bai"), emit: bai + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def prefix = task.ext.prefix ?: "${meta.id}" + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def args3 = task.ext.args3 ?: '' + def biscuit_cpus = (int) Math.max(Math.floor(task.cpus*0.95),1) + def samtools_cpus = task.cpus-biscuit_cpus + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit align \\ + -@ $biscuit_cpus \\ + $args \\ + \$INDEX \\ + $reads | \\ + samblaster \\ + $args2 | \\ + samtools sort \\ + -@ $samtools_cpus \\ + $args3 \\ + --write-index \\ + -o ${prefix}.bam##idx##${prefix}.bam.bai + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + samtools: \$( samtools --version |& sed '1!d; s/^.*samtools //' ) + samblaster: \$( samblaster --version |& sed 's/^.*samblaster: Version //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/biscuitblaster/meta.yml b/modules/biscuit/biscuitblaster/meta.yml new file mode 100644 index 00000000..eb22dd0f --- /dev/null +++ b/modules/biscuit/biscuitblaster/meta.yml @@ -0,0 +1,78 @@ +name: biscuit_blaster + +description: A fast, compact one-liner to produce duplicate-marked, sorted, and indexed BAM files using Biscuit +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - bam + +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/biscuitblaster/ + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + - samblaster: + description: | + samblaster is a fast and flexible program for marking duplicates in read-id grouped paired-end SAM files. + It can also optionally output discordant read pairs and/or split read mappings to separate SAM files, + and/or unmapped/clipped reads to a separate FASTQ file. + By default, samblaster reads SAM input from stdin and writes SAM to stdout. + homepage: None + documentation: https://github.com/GregoryFaust/samblaster + tool_dev_url: https://github.com/GregoryFaust/samblaster + doi: "10.1093/bioinformatics/btu314" + licence: ["MIT"] + - samtools: + description: | + SAMtools is a set of utilities for interacting with and post-processing + short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. + These files are generated as output by short read aligners like BWA. + homepage: http://www.htslib.org/ + documentation: hhttp://www.htslib.org/doc/samtools.html + doi: 10.1093/bioinformatics/btp352 + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input fastq files of size 1 and 2 for single-end and paired-end data, + respectively. 
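As a quick orientation for how the reads and index inputs described here fit together, a minimal DSL2 sketch follows; the include paths, sample id and FASTQ file names are illustrative assumptions rather than part of the module, and the channel shape simply mirrors the input tuple declared in the module's main.nf:

include { BISCUIT_INDEX   } from './modules/biscuit/index/main'
include { BISCUIT_BLASTER } from './modules/biscuit/biscuitblaster/main'

workflow ALIGN_AND_DEDUP {
    // Paired-end sample: a meta map plus a two-element FASTQ list
    input = [ [ id:'sample1', single_end:false ],
              [ file('sample1_R1.fastq.gz'), file('sample1_R2.fastq.gz') ] ]
    fasta = file('genome.fasta')

    BISCUIT_INDEX ( fasta )                             // builds the *.bis.* / *.dau.* / *.par.* index files
    BISCUIT_BLASTER ( input, BISCUIT_INDEX.out.index )  // emits a duplicate-marked, sorted BAM plus its .bai
}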
+ - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Output BAM file containing read alignments + pattern: "*.{bam}" + - bai: + type: file + description: Output BAM index + pattern: "*.{bai}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/bsconv/main.nf b/modules/biscuit/bsconv/main.nf new file mode 100644 index 00000000..8c5ee91f --- /dev/null +++ b/modules/biscuit/bsconv/main.nf @@ -0,0 +1,39 @@ +process BISCUIT_BSCONV { + tag "$meta.id" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/biscuit:1.0.2.20220113--h81a5ba2_0': + 'quay.io/biocontainers/biscuit:1.0.2.20220113--h81a5ba2_0' }" + + input: + tuple val(meta), path(bam), path(bai) + path(index) + + output: + tuple val(meta), path("*.bam"), emit: bsconv_bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if ("$bam" == "${prefix}.bam") error "Input and output names are the same, set prefix in module configuration to disambiguate!" + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit bsconv \\ + $args \\ + \$INDEX \\ + $bam \\ + ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/bsconv/meta.yml b/modules/biscuit/bsconv/meta.yml new file mode 100644 index 00000000..fa05ee47 --- /dev/null +++ b/modules/biscuit/bsconv/meta.yml @@ -0,0 +1,55 @@ +name: biscuit_bsconv +description: Summarize and/or filter reads based on bisulfite conversion rate +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - bam + - filter + +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/subcommand_help.html#biscuit-bsconv + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM file contained mapped reads + - bai: + type: file + description: BAM file index + - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bsconv_bam: + type: file + description: Output BAM file containing filtered read alignments + pattern: "*.{bam}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/epiread/main.nf b/modules/biscuit/epiread/main.nf new file mode 100644 index 00000000..bc8c6d9f --- /dev/null +++ b/modules/biscuit/epiread/main.nf @@ -0,0 +1,57 @@ +process BISCUIT_EPIREAD { + tag "$meta.id" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(bam), path(bai), path(snp_bed) + path(index) + + output: + tuple val(meta), path("*.bed.gz"), emit: epiread_bed + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def biscuit_cpus = (int) Math.max(Math.floor(task.cpus*0.9),1) + def samtools_cpus = task.cpus-biscuit_cpus + // As of 2/25/22, epiread does not support reading a gzipped SNP BED file. + // This is a bit hacky but allows the user to supply a gzipped OR uncompressed bed file + def unzip_snp_bed = snp_bed && (snp_bed.toString() =~ /\.gz$/) ? "bgzip -d ${snp_bed}" : "" + def unzipped_snp_bed = snp_bed ? snp_bed.toString() - ~/\.gz$/: "" + // SNP BED input is optional + def options_snp_bed = snp_bed ? "-B ${unzipped_snp_bed}" : "" + if ("$options_snp_bed" == "${prefix}.bed.gz") error "Input and output names are the same, set prefix in module configuration to disambiguate!" + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + $unzip_snp_bed + + biscuit epiread \\ + -@ $biscuit_cpus \\ + $args \\ + $options_snp_bed \\ + \$INDEX \\ + $bam | \\ + LC_ALL=C sort -k1,1 -k2,2n | \\ + bgzip \\ + -@ $samtools_cpus \\ + $args2 \\ + -c > ${prefix}.bed.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + samtools: \$( samtools --version |& sed '1!d; s/^.*samtools //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/epiread/meta.yml b/modules/biscuit/epiread/meta.yml new file mode 100644 index 00000000..357b83df --- /dev/null +++ b/modules/biscuit/epiread/meta.yml @@ -0,0 +1,58 @@ +name: biscuit_epiread +description: | + Summarizes read-level methylation (and optionally SNV) information from a + Biscuit BAM file in a standard-compliant BED format. +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - bam +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/epiread_format/ + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: Biscuit BAM file + - bai: + type: file + description: BAM index + - snp_bed: + type: file + description: BED file containing SNP information (optional) + - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - epiread_bed: + type: file + description: Gzipped BED file with methylation (and optionally SNV) information + pattern: "*.{epiread.bed.gz}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/index/main.nf b/modules/biscuit/index/main.nf new file mode 100644 index 00000000..9aa04330 --- /dev/null +++ b/modules/biscuit/index/main.nf @@ -0,0 +1,33 @@ +process BISCUIT_INDEX { + tag "$fasta" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/biscuit:1.0.2.20220113--h81a5ba2_0': + 'quay.io/biocontainers/biscuit:1.0.2.20220113--h81a5ba2_0' }" + + input: + path fasta, stageAs: "BiscuitIndex/*" + + output: + path "BiscuitIndex/*.fa*", emit: index, includeInputs: true + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + """ + biscuit \\ + index \\ + $args \\ + $fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/index/meta.yml b/modules/biscuit/index/meta.yml new file mode 100644 index 00000000..96134f65 --- /dev/null +++ b/modules/biscuit/index/meta.yml @@ -0,0 +1,38 @@ +name: biscuit_index +description: Indexes a reference genome for use with Biscuit +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - index + - reference + - fasta + +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/alignment + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - fasta: + type: file + description: Input genome fasta file + +output: + - index: + type: dir + description: Biscuit genome index directory + pattern: "BiscuitIndex" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/mergecg/main.nf b/modules/biscuit/mergecg/main.nf new file mode 100644 index 00000000..6cafdb36 --- /dev/null +++ b/modules/biscuit/mergecg/main.nf @@ -0,0 +1,43 @@ +process BISCUIT_MERGECG { + tag "$meta.id" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
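Since each of these processes reads its options from task.ext.args and its output name from task.ext.prefix, per-module settings are expected to come from configuration; a minimal sketch of such a config is below, where the selector names match processes in this patch but the argument values and the '.bsconv' suffix are illustrative assumptions. The bsconv module in particular errors out if the output name equals the input BAM name, which is exactly the case ext.prefix is meant to resolve:

process {
    withName: 'BISCUIT_BSCONV' {
        ext.args   = '-f 0.1'                 // mirrors the value used in this patch's bsconv test config
        ext.prefix = { "${meta.id}.bsconv" }  // keeps the output BAM name distinct from the input BAM
    }
    withName: 'BISCUIT_BLASTER' {
        ext.args2  = '--addMateTags'          // args2 is forwarded to samblaster (illustrative flag)
    }
}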
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(bed) + path index + + output: + tuple val(meta), path("*.bed.gz"), emit: mergecg_bed + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit mergecg \\ + $args \\ + \$INDEX \\ + $bed | \\ + LC_ALL=C sort -k1,1 -k2,2n | \\ + bgzip \\ + $args2 \\ + -c > ${prefix}.bed.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + samtools: \$( samtools --version |& sed '1!d; s/^.*samtools //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/mergecg/meta.yml b/modules/biscuit/mergecg/meta.yml new file mode 100644 index 00000000..25f6b9e2 --- /dev/null +++ b/modules/biscuit/mergecg/meta.yml @@ -0,0 +1,51 @@ +name: biscuit_mergecg +description: Merges methylation information for opposite-strand C's in a CpG context +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - bed +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/methylextraction.html + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: | + Biscuit BED file (output of biscuit vcf2bed) + - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - mergecg_bed: + type: file + description: Gzipped BED file with merged methylation information + pattern: "*.bed.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/pileup/main.nf b/modules/biscuit/pileup/main.nf new file mode 100644 index 00000000..dcddc418 --- /dev/null +++ b/modules/biscuit/pileup/main.nf @@ -0,0 +1,45 @@ +process BISCUIT_PILEUP { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
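Downstream of alignment, the pileup, vcf2bed and mergecg modules chain into a methylation-extraction path (mergecg consumes the BED produced by vcf2bed, which in turn consumes the pileup VCF). A minimal sketch of that wiring, assuming a single germline BAM and hypothetical file and include paths, with no tumor BAM so pileup stays in its default non-somatic mode:

include { BISCUIT_INDEX   } from './modules/biscuit/index/main'
include { BISCUIT_PILEUP  } from './modules/biscuit/pileup/main'
include { BISCUIT_VCF2BED } from './modules/biscuit/vcf2bed/main'
include { BISCUIT_MERGECG } from './modules/biscuit/mergecg/main'

workflow EXTRACT_METHYLATION {
    // One "normal" BAM with its index; the tumor BAM/BAI slots stay empty
    input = [ [ id:'sample1' ],
              [ file('sample1.bam') ], [ file('sample1.bam.bai') ],
              [], [] ]

    BISCUIT_INDEX   ( file('genome.fasta') )
    BISCUIT_PILEUP  ( input, BISCUIT_INDEX.out.index )                    // per-sample VCF with methylation and SNV calls
    BISCUIT_VCF2BED ( BISCUIT_PILEUP.out.vcf )                            // VCF -> sorted, bgzipped methylation BED
    BISCUIT_MERGECG ( BISCUIT_VCF2BED.out.bed, BISCUIT_INDEX.out.index )  // merge opposite-strand C calls per CpG
}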
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(normal_bams), path(normal_bais), path(tumor_bam), path(tumor_bai) + path index + + output: + tuple val(meta), path("*.vcf.gz"), emit: vcf + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def biscuit_cpus = (int) Math.max(Math.floor(task.cpus*0.9),1) + def bgzip_cpus = task.cpus-biscuit_cpus + if ( tumor_bam != [] && normal_bams.toList().size() > 1 ) error "[BISCUIT_PILEUP] error: Tumor BAM provided with more than one normal BAM" + if ( tumor_bam.toList().size() > 1 ) error "[BISCUIT_PILEUP] error: more than one tumor BAM provided" + input = ( tumor_bam==[] ) ? "${normal_bams}" : "-S -T ${tumor_bam} -I ${normal_bams}" + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit pileup \\ + -@ $biscuit_cpus \\ + $args \\ + \$INDEX \\ + $input \\ + | bgzip -@ $bgzip_cpus $args2 > ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/pileup/meta.yml b/modules/biscuit/pileup/meta.yml new file mode 100644 index 00000000..399e3c2f --- /dev/null +++ b/modules/biscuit/pileup/meta.yml @@ -0,0 +1,70 @@ +name: biscuit_pileup +description: Computes cytosine methylation and callable SNV mutations, optionally in reference to a germline BAM to call somatic variants +keywords: + - bisulfite + - DNA methylation + - pileup + - variant calling + - WGBS + - scWGBS + - bam + - vcf +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/pileup.html + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - normal_bams: + type: file(s) + description: | + BAM files to be analyzed. If no tumor_bam file is provided, any number of "normal" BAMs may be provided + ("normal" here is just a semantic issue, these BAMs could be from tumor or any other kind of tissue). If a + tumor BAM file is provided, exactly one normal (germline) BAM must be provided. + pattern: "*.{bam}" + - normal_bais: + type: file(s) + description: BAM index file or files corresponding to the provided normal_bams + pattern: "*.{bai}" + - tumor_bam: + type: file(s) + description: | + Optional. If a tumor BAM file is provided, pileup will run in "somatic" mode and will annotate variants with + their somatic state (present in tumor only, present in normal only, present in both, etc). Note that if a + tumor BAM file is provided, exactly one normal BAM must be provided. + pattern: "*.{bam}" + - tumor_bai: + type: file(s) + description: Optional. 
BAM index file corresponding to provided tumor_bam + pattern: "*.{bai}" + - index: + type: dir + description: Biscuit genome index directory (generated with 'biscuit index') + pattern: "BiscuitIndex" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - vcf: + type: file + description: vcf file with methylation information + pattern: "*.{vcf.gz}" + +authors: + - "@njspix" diff --git a/modules/biscuit/qc/main.nf b/modules/biscuit/qc/main.nf new file mode 100644 index 00000000..dea6473b --- /dev/null +++ b/modules/biscuit/qc/main.nf @@ -0,0 +1,40 @@ +process BISCUIT_QC { + tag "$meta.id" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/biscuit:1.0.2.20220113--h81a5ba2_0': + 'quay.io/biocontainers/biscuit:1.0.2.20220113--h81a5ba2_0' }" + + input: + tuple val(meta), path(bam) + path(index) + + output: + tuple val(meta), path("*.txt"), emit: biscuit_qc_reports + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def se = meta.single_end ? "-s" : "" + """ + INDEX=`find -L ./ -name "*.bis.amb" | sed 's/.bis.amb//'` + + biscuit qc \\ + $args \\ + $se \\ + \$INDEX \\ + $bam \\ + $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$( biscuit version |& sed '1!d; s/^.*BISCUIT Version: //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/qc/meta.yml b/modules/biscuit/qc/meta.yml new file mode 100644 index 00000000..a3e65a90 --- /dev/null +++ b/modules/biscuit/qc/meta.yml @@ -0,0 +1,51 @@ +name: biscuit_qc +description: Perform basic quality control on a BAM file generated with Biscuit +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - index + - BAM + - quality control + +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/subcommand_help.html#biscuit-qc + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM file produced using Biscuit + +output: + - biscuit_qc_reports: + type: file + description: | + Summary files containing the following information: + - CpG retention by position in read + - CpH retention by position in read + - Read duplication statistics + - Insert size distribution + - Distribution of mapping qualities + - Proportion of reads mapping to each strand + - Read-averaged cytosine conversion rate for CpA, CpC, CpG, and CpT + pattern: "*.txt" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/modules/biscuit/vcf2bed/main.nf b/modules/biscuit/vcf2bed/main.nf new file mode 100644 index 00000000..7bbcc826 --- /dev/null +++ b/modules/biscuit/vcf2bed/main.nf @@ -0,0 +1,39 @@ +process BISCUIT_VCF2BED { + tag "$meta.id" + label 'process_long' + + conda (params.enable_conda ? "bioconda::biscuit=1.0.2.20220113 bioconda::samtools=1.15" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0': + 'quay.io/biocontainers/mulled-v2-db16f1c237a26ea9245cf9924f858974ff321d6e:17fa66297f088a1bc7560b7b90dc273bf23f2d8c-0' }" + + input: + tuple val(meta), path(vcf) + + output: + tuple val(meta), path("*.bed.gz"), emit: bed + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + biscuit vcf2bed \\ + $args \\ + $vcf | \\ + LC_ALL=C sort -k1,1 -k2,2n | \\ + bgzip \\ + $args2 \\ + -c > ${prefix}.bed.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + biscuit: \$(echo \$(biscuit version 2>&1) | sed 's/^.*BISCUIT Version: //; s/Using.*\$//') + samtools: \$( samtools --version |& sed '1!d; s/^.*samtools //' ) + END_VERSIONS + """ +} diff --git a/modules/biscuit/vcf2bed/meta.yml b/modules/biscuit/vcf2bed/meta.yml new file mode 100644 index 00000000..c34d5a4d --- /dev/null +++ b/modules/biscuit/vcf2bed/meta.yml @@ -0,0 +1,48 @@ +name: biscuit_vcf2bed +description: | + Summarizes methylation or SNV information from a Biscuit VCF in a + standard-compliant BED file. +keywords: + - biscuit + - DNA methylation + - WGBS + - scWGBS + - bisulfite sequencing + - aligner + - vcf +tools: + - biscuit: + description: A utility for analyzing sodium bisulfite conversion-based DNA methylation/modification data + homepage: https://huishenlab.github.io/biscuit/ + documentation: https://huishenlab.github.io/biscuit/docs/methylextraction.html + tool_dev_url: https://github.com/huishenlab/biscuit + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - vcf: + type: file + description: Biscuit vcf file (output of biscuit pileup) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bed: + type: file + description: Gzipped BED file with methylation or SNV information + pattern: "*.{bed.gz}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@njspix" diff --git a/tests/config/nextflow.config b/tests/config/nextflow.config index 741edf5e..4ea085f9 100644 --- a/tests/config/nextflow.config +++ b/tests/config/nextflow.config @@ -28,5 +28,5 @@ conda { createTimeout = "120 min" } includeConfig 'test_data.config' manifest { - nextflowVersion = '!>=21.10.3' + nextflowVersion = '!>=21.10.0' } diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 5c110ba7..6ecab096 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -198,6 +198,49 @@ bedtools/subtract: - modules/bedtools/subtract/** - tests/modules/bedtools/subtract/** +biscuit/align: + - modules/biscuit/index/** + - modules/biscuit/align/** + - tests/modules/biscuit/align/** + +biscuit/biscuitblaster: + - modules/biscuit/index/** + - modules/biscuit/biscuitblaster/** + - tests/modules/biscuit/biscuitblaster/** + +biscuit/bsconv: + - modules/biscuit/index/** + - modules/biscuit/bsconv/** + - tests/modules/biscuit/bsconv/** + +biscuit/epiread: + - modules/biscuit/index/** + - modules/biscuit/epiread/** + - tests/modules/biscuit/epiread/** + +biscuit/index: + - modules/biscuit/index/** + - tests/modules/biscuit/index/** + +biscuit/mergecg: + - modules/biscuit/index/** + - modules/biscuit/mergecg/** + - tests/modules/biscuit/mergecg/** + +biscuit/pileup: + - modules/biscuit/index/** + - modules/biscuit/pileup/** + - tests/modules/biscuit/pileup/** + +biscuit/qc: + - modules/biscuit/index/** + - modules/biscuit/qc/** + - tests/modules/biscuit/qc/** + +biscuit/vcf2bed: + - modules/biscuit/vcf2bed/** + - tests/modules/biscuit/vcf2bed/** + biobambam/bammarkduplicates2: - modules/biobambam/bammarkduplicates2/** - tests/modules/biobambam/bammarkduplicates2/** diff --git a/tests/modules/biscuit/align/main.nf b/tests/modules/biscuit/align/main.nf new file mode 100644 index 00000000..f3e3cb64 --- /dev/null +++ b/tests/modules/biscuit/align/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_ALIGN as BISCUIT_ALIGN_SE } from '../../../../modules/biscuit/align/main.nf' +include { BISCUIT_ALIGN as BISCUIT_ALIGN_PE } from '../../../../modules/biscuit/align/main.nf' + + +// Single-end test +workflow test_biscuit_align_single { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_ALIGN_SE (input, BISCUIT_INDEX.out.index ) +} + +// paired-end test +workflow test_biscuit_align_paired { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_ALIGN_SE (input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/align/nextflow.config b/tests/modules/biscuit/align/nextflow.config new file mode 100644 index 
00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/align/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/align/test.yml b/tests/modules/biscuit/align/test.yml new file mode 100644 index 00000000..bf778ff5 --- /dev/null +++ b/tests/modules/biscuit/align/test.yml @@ -0,0 +1,53 @@ +- name: biscuit align test_biscuit_align_single + command: nextflow run tests/modules/biscuit/align -entry test_biscuit_align_single -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/align + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bam + md5sum: eb36532425cb9b259410d6464a9e523a + - path: output/biscuit/versions.yml + md5sum: a86c4170bbf90cc75b93eb59ea124acd + +- name: biscuit align test_biscuit_align_paired + command: nextflow run tests/modules/biscuit/align -entry test_biscuit_align_paired -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/align + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bam + md5sum: be3f6aa86c499d6a6b2996e5936e4f50 + - path: output/biscuit/versions.yml + md5sum: f0b7dffd28f5e6bb1466fce6661d133f diff --git a/tests/modules/biscuit/biscuitblaster/main.nf b/tests/modules/biscuit/biscuitblaster/main.nf new file mode 100644 index 00000000..5e4bc95a --- /dev/null +++ b/tests/modules/biscuit/biscuitblaster/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_BLASTER as BISCUIT_BLASTER_SE } from '../../../../modules/biscuit/biscuitblaster/main.nf' +include { BISCUIT_BLASTER as BISCUIT_BLASTER_PE } from '../../../../modules/biscuit/biscuitblaster/main.nf' + +// Single-end test +workflow test_biscuit_blaster_single { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], 
checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_BLASTER_SE (input, BISCUIT_INDEX.out.index ) +} + +// paired-end test +workflow test_biscuit_blaster_paired { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_methylated_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_methylated_2_fastq_gz'], checkIfExists: true) ] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_BLASTER_PE (input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/biscuitblaster/nextflow.config b/tests/modules/biscuit/biscuitblaster/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/biscuitblaster/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/biscuitblaster/test.yml b/tests/modules/biscuit/biscuitblaster/test.yml new file mode 100644 index 00000000..37bb6543 --- /dev/null +++ b/tests/modules/biscuit/biscuitblaster/test.yml @@ -0,0 +1,57 @@ +- name: biscuit biscuitblaster test_biscuit_blaster_single + command: nextflow run tests/modules/biscuit/biscuitblaster -entry test_biscuit_blaster_single -c tests/config/nextflow.config + tags: + - biscuit/biscuitblaster + - biscuit + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bam + md5sum: 9ece50b67349382d38b20c2702e65675 + - path: output/biscuit/test.bam.bai + md5sum: 8f14bb42fd38cc7ce4a3c3a9d7133ea4 + - path: output/biscuit/versions.yml + md5sum: bfb660b5b0d92dde6817a1c6a2a302bb + +- name: biscuit biscuitblaster test_biscuit_blaster_paired + command: nextflow run tests/modules/biscuit/biscuitblaster -entry test_biscuit_blaster_paired -c tests/config/nextflow.config + tags: + - biscuit/biscuitblaster + - biscuit + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - 
path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bam + md5sum: 0c6de35f38003df6ea5dd036170df91b + - path: output/biscuit/test.bam.bai + md5sum: 0d76977b2e36046cc176112776c5fa4e + - path: output/biscuit/versions.yml + md5sum: 82160a7ad29ccc3a21e59b1869399c04 diff --git a/tests/modules/biscuit/bsconv/main.nf b/tests/modules/biscuit/bsconv/main.nf new file mode 100644 index 00000000..f7338869 --- /dev/null +++ b/tests/modules/biscuit/bsconv/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_BSCONV } from '../../../../modules/biscuit/bsconv/main.nf' + +workflow test_biscuit_bsconv { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam_bai'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_BSCONV ( input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/bsconv/nextflow.config b/tests/modules/biscuit/bsconv/nextflow.config new file mode 100644 index 00000000..133905d0 --- /dev/null +++ b/tests/modules/biscuit/bsconv/nextflow.config @@ -0,0 +1,10 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: '.*BISCUIT_BSCONV' { + ext.args = '-f 0.1' + } + +} + diff --git a/tests/modules/biscuit/bsconv/test.yml b/tests/modules/biscuit/bsconv/test.yml new file mode 100644 index 00000000..528a4fe5 --- /dev/null +++ b/tests/modules/biscuit/bsconv/test.yml @@ -0,0 +1,26 @@ +- name: biscuit bsconv test_biscuit_bsconv + command: nextflow run tests/modules/biscuit/bsconv -entry test_biscuit_bsconv -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/bsconv + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bam + md5sum: e33e9498d00dd32222b90a6bd981226f + - path: output/biscuit/versions.yml + md5sum: 7deec1f096203542bbb72ac4fa05f9ba diff --git a/tests/modules/biscuit/epiread/main.nf b/tests/modules/biscuit/epiread/main.nf new file mode 100644 index 00000000..54a73ae3 --- /dev/null +++ b/tests/modules/biscuit/epiread/main.nf @@ -0,0 +1,48 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_EPIREAD } from '../../../../modules/biscuit/epiread/main.nf' + +workflow test_biscuit_epiread_nosnp { + + input = [ + [ id:'test', 
single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam_bai'], checkIfExists: true), + [] //SNP BED file + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_EPIREAD ( input, BISCUIT_INDEX.out.index ) +} + +workflow test_biscuit_epiread_snp { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam_bai'], checkIfExists: true), + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/biscuit/test-snp.bed') + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_EPIREAD ( input, BISCUIT_INDEX.out.index ) +} + +workflow test_biscuit_epiread_snp_decompress { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam_bai'], checkIfExists: true), + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/biscuit/test-snp.bed.gz') + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_EPIREAD ( input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/epiread/nextflow.config b/tests/modules/biscuit/epiread/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/epiread/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/epiread/test.yml b/tests/modules/biscuit/epiread/test.yml new file mode 100644 index 00000000..1db86982 --- /dev/null +++ b/tests/modules/biscuit/epiread/test.yml @@ -0,0 +1,80 @@ +- name: biscuit epiread test_biscuit_epiread_nosnp + command: nextflow run tests/modules/biscuit/epiread -entry test_biscuit_epiread_nosnp -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/epiread + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bed.gz + md5sum: dbb30b59f4ef6fdfdee38630225c0574 + - path: output/biscuit/versions.yml + md5sum: 674a77ac5ca8f4b42d30e58e30c3a9af + +- name: biscuit epiread test_biscuit_epiread_snp + command: nextflow run tests/modules/biscuit/epiread 
-entry test_biscuit_epiread_snp -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/epiread + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bed.gz + md5sum: a29fea6ad74453ec94f8220747dab906 + - path: output/biscuit/versions.yml + md5sum: f2f7c4ff3c6a135b1c8a3aff24a44d81 + +- name: biscuit epiread test_biscuit_epiread_snp_decompress + command: nextflow run tests/modules/biscuit/epiread -entry test_biscuit_epiread_snp_decompress -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/epiread + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bed.gz + md5sum: a29fea6ad74453ec94f8220747dab906 + - path: output/biscuit/versions.yml + md5sum: cb0258ebf4e1a731a4310ec17c3dc442 diff --git a/tests/modules/biscuit/index/main.nf b/tests/modules/biscuit/index/main.nf new file mode 100644 index 00000000..c13d441b --- /dev/null +++ b/tests/modules/biscuit/index/main.nf @@ -0,0 +1,12 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' + +workflow test_biscuit_index { + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) +} diff --git a/tests/modules/biscuit/index/nextflow.config b/tests/modules/biscuit/index/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/index/test.yml b/tests/modules/biscuit/index/test.yml new file mode 100644 index 00000000..a2e4cb67 --- /dev/null +++ b/tests/modules/biscuit/index/test.yml @@ -0,0 +1,24 @@ +- name: biscuit index test_biscuit_index + command: nextflow run tests/modules/biscuit/index -entry test_biscuit_index -c tests/config/nextflow.config + tags: + - biscuit/index + - biscuit + files: + - path: 
output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/versions.yml + md5sum: 5c5873e482a57966db246648ffddf62f diff --git a/tests/modules/biscuit/mergecg/main.nf b/tests/modules/biscuit/mergecg/main.nf new file mode 100644 index 00000000..7d51f3b8 --- /dev/null +++ b/tests/modules/biscuit/mergecg/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_MERGECG } from '../../../../modules/biscuit/mergecg/main.nf' + +workflow test_biscuit_mergecg { + + input = [ + [ id:'test', single_end:false ], // meta map + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/biscuit/test-cg.bed.gz', checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_MERGECG ( input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/mergecg/nextflow.config b/tests/modules/biscuit/mergecg/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/mergecg/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/mergecg/test.yml b/tests/modules/biscuit/mergecg/test.yml new file mode 100644 index 00000000..7b2408bb --- /dev/null +++ b/tests/modules/biscuit/mergecg/test.yml @@ -0,0 +1,26 @@ +- name: biscuit mergecg test_biscuit_mergecg + command: nextflow run tests/modules/biscuit/mergecg -entry test_biscuit_mergecg -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/mergecg + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test.bed.gz + md5sum: d693b28ddc81265f388860d391fc7c5b + - path: output/biscuit/versions.yml + md5sum: f670d63671af06bf8654677bf373b3a1 diff --git a/tests/modules/biscuit/pileup/main.nf b/tests/modules/biscuit/pileup/main.nf new file mode 100644 
index 00000000..cf1914ec --- /dev/null +++ b/tests/modules/biscuit/pileup/main.nf @@ -0,0 +1,38 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_PILEUP } from '../../../../modules/biscuit/pileup/main.nf' + +workflow test_biscuit_pileup { + + input = [ [ id:'test' ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)], + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true)], + [], //tumor bam + [] //tumor bai + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_PILEUP ( input, BISCUIT_INDEX.out.index ) + +} + +workflow test_biscuit_pileup_somatic { + + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX ( fasta ) + BISCUIT_PILEUP ( input, BISCUIT_INDEX.out.index ) + +} diff --git a/tests/modules/biscuit/pileup/nextflow.config b/tests/modules/biscuit/pileup/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/pileup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/pileup/test.yml b/tests/modules/biscuit/pileup/test.yml new file mode 100644 index 00000000..ad840737 --- /dev/null +++ b/tests/modules/biscuit/pileup/test.yml @@ -0,0 +1,53 @@ +- name: biscuit pileup test_biscuit_pileup + command: nextflow run tests/modules/biscuit/pileup -entry test_biscuit_pileup -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/pileup + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: f315020d899597c1b57e5fe9f60f4c3e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 1891c1de381b3a96d4e72f590fde20c1 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: 2df4aa2d7580639fa0fcdbcad5e2e969 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 8569fbdb2c98c6fb16dfa73d8eacb070 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: 668799eea40aefb8013cbf8ed6c47cfe + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 10541b05bbea44d0344b0345a6522ba8 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 2c38edd64234420add133f5fe1ff975d + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 7deee1aac3395d93bef1df11ab38379e + - path: output/biscuit/test.vcf.gz + md5sum: ef9798c318ead0f8a79ee7fdeb1ffbf9 + - path: output/biscuit/versions.yml + md5sum: ae38b891fdbf9f7ff5c486408f949dc5 + +- name: biscuit pileup test_biscuit_pileup_somatic + command: nextflow run 
tests/modules/biscuit/pileup -entry test_biscuit_pileup_somatic -c tests/config/nextflow.config + tags: + - biscuit + - biscuit/pileup + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: f315020d899597c1b57e5fe9f60f4c3e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 1891c1de381b3a96d4e72f590fde20c1 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: 2df4aa2d7580639fa0fcdbcad5e2e969 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 8569fbdb2c98c6fb16dfa73d8eacb070 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: 668799eea40aefb8013cbf8ed6c47cfe + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 10541b05bbea44d0344b0345a6522ba8 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 2c38edd64234420add133f5fe1ff975d + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 7deee1aac3395d93bef1df11ab38379e + - path: output/biscuit/test.vcf.gz + md5sum: 692b4a6191b08fabe5efa5abe00da420 + - path: output/biscuit/versions.yml + md5sum: cc51fd498d67fdc7cc067686eb855b93 diff --git a/tests/modules/biscuit/qc/main.nf b/tests/modules/biscuit/qc/main.nf new file mode 100644 index 00000000..7c6d61cd --- /dev/null +++ b/tests/modules/biscuit/qc/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_INDEX } from '../../../../modules/biscuit/index/main.nf' +include { BISCUIT_QC } from '../../../../modules/biscuit/qc/main.nf' + +workflow test_biscuit_qc { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_methylated_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + BISCUIT_INDEX( fasta ) + BISCUIT_QC ( input, BISCUIT_INDEX.out.index ) +} diff --git a/tests/modules/biscuit/qc/nextflow.config b/tests/modules/biscuit/qc/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/qc/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/qc/test.yml b/tests/modules/biscuit/qc/test.yml new file mode 100644 index 00000000..ed33dd30 --- /dev/null +++ b/tests/modules/biscuit/qc/test.yml @@ -0,0 +1,38 @@ +- name: biscuit qc test_biscuit_qc + command: nextflow run tests/modules/biscuit/qc -entry test_biscuit_qc -c tests/config/nextflow.config + tags: + - biscuit/qc + - biscuit + files: + - path: output/biscuit/BiscuitIndex/genome.fasta + md5sum: 6e9fe4042a72f2345f644f239272b7e6 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.amb + md5sum: 3a68b8b2287e07dd3f5f95f4344ba76e + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.ann + md5sum: c32e11f6c859f166c7525a9c1d583567 + - path: output/biscuit/BiscuitIndex/genome.fasta.bis.pac + md5sum: 983e3d2cd6f36e2546e6d25a0da78d66 + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.bwt + md5sum: a11bc31775f7b7a4f9cd3bc4f981661a + - path: output/biscuit/BiscuitIndex/genome.fasta.dau.sa + md5sum: 9c9e07fa1c75ef32d764274579c89b08 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.bwt + md5sum: 62eb83cd557a47b59589713d98024fc2 + - path: output/biscuit/BiscuitIndex/genome.fasta.par.sa + md5sum: 55bcd97d7059bf73dc0d221e36e8e901 + - path: output/biscuit/test_CpGRetentionByReadPos.txt + md5sum: 498b6c0af196fb34c8835371b9e9b68a + - path: 
output/biscuit/test_CpHRetentionByReadPos.txt + md5sum: a266942c5719cecab7f60f63cbe7335d + - path: output/biscuit/test_dup_report.txt + md5sum: 65bddf4fbe9e40d7c6c976060df53e3b + - path: output/biscuit/test_isize_table.txt + md5sum: aadf6f2e271abc334b6146cf164bdda3 + - path: output/biscuit/test_mapq_table.txt + md5sum: c8adaac84bb8db3b7f48e1ed4fccad00 + - path: output/biscuit/test_strand_table.txt + md5sum: 27068382ba6b2dbf313169a85c9dbb3a + - path: output/biscuit/test_totalReadConversionRate.txt + md5sum: 8f0c1fceaebfa74f2757720e3bc85fed + - path: output/biscuit/versions.yml + md5sum: a730fa4888e6882cf1b8ba92645b04ee diff --git a/tests/modules/biscuit/vcf2bed/main.nf b/tests/modules/biscuit/vcf2bed/main.nf new file mode 100644 index 00000000..25597d49 --- /dev/null +++ b/tests/modules/biscuit/vcf2bed/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BISCUIT_VCF2BED } from '../../../../modules/biscuit/vcf2bed/main.nf' + +workflow test_biscuit_vcf2bed { + + input = [ + [ id:'test', single_end:false ], // meta map + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/biscuit/test.vcf.gz', checkIfExists: true) + ] + + BISCUIT_VCF2BED ( input ) + +} diff --git a/tests/modules/biscuit/vcf2bed/nextflow.config b/tests/modules/biscuit/vcf2bed/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biscuit/vcf2bed/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biscuit/vcf2bed/test.yml b/tests/modules/biscuit/vcf2bed/test.yml new file mode 100644 index 00000000..7eaa2972 --- /dev/null +++ b/tests/modules/biscuit/vcf2bed/test.yml @@ -0,0 +1,10 @@ +- name: biscuit vcf2bed test_biscuit_vcf2bed + command: nextflow run tests/modules/biscuit/vcf2bed -entry test_biscuit_vcf2bed -c tests/config/nextflow.config + tags: + - biscuit/vcf2bed + - biscuit + files: + - path: output/biscuit/test.bed.gz + md5sum: e2dd492289dc8463f364285e31b9553a + - path: output/biscuit/versions.yml + md5sum: cd784276e2fb6739d55e1b60d12202cd From 50417f5f8c71fa2604cc049230906fb9e57a643d Mon Sep 17 00:00:00 2001 From: Phil Ewels Date: Tue, 15 Mar 2022 06:22:44 +0100 Subject: [PATCH 035/283] Fix indentation in bwa/aln meta (#1392) --- modules/bwa/aln/meta.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/bwa/aln/meta.yml b/modules/bwa/aln/meta.yml index a8b74b8b..8ebaf3da 100644 --- a/modules/bwa/aln/meta.yml +++ b/modules/bwa/aln/meta.yml @@ -21,10 +21,10 @@ tools: input: - meta: - type: map - description: | - Groovy Map containing sample information - e.g. [ id:'test', single_end:false ] + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] - reads: type: file description: | From 86ac223916613d77f8a611d3f19dfd314409fd4c Mon Sep 17 00:00:00 2001 From: Sima Rahimi Date: Tue, 15 Mar 2022 10:20:46 +0100 Subject: [PATCH 036/283] Cnvpytor (#1297) * Added cnvpytor/importreaddepth module * Corrected process name in meta.yml file * added -chrom argument * space correction * Added complementary info * fixed typo * md5sum added * modified the module to work on cram files as well * Added cnvpytor/histogram module and test files * Added cnvpytor/partition module and test files * added cnvpytor/callcnvs module and tests * modified by new modules * Added test file and fixed input path in modules * added when block * little fixes * skip tracking test.yml * removed changes to test if conflicts get resolved * updated outfile name * corrected the version.yml content --- modules/cnvpytor/callcnvs/main.nf | 33 +++++++++++ modules/cnvpytor/callcnvs/meta.yml | 40 ++++++++++++++ modules/cnvpytor/histogram/main.nf | 32 +++++++++++ modules/cnvpytor/histogram/meta.yml | 42 ++++++++++++++ modules/cnvpytor/importreaddepth/main.nf | 38 +++++++++++++ modules/cnvpytor/importreaddepth/meta.yml | 55 +++++++++++++++++++ modules/cnvpytor/partition/main.nf | 32 +++++++++++ modules/cnvpytor/partition/meta.yml | 42 ++++++++++++++ tests/config/test_data.config | 2 + tests/modules/cnvpytor/callcnvs/main.nf | 15 +++++ .../modules/cnvpytor/callcnvs/nextflow.config | 7 +++ tests/modules/cnvpytor/callcnvs/test.yml | 10 ++++ tests/modules/cnvpytor/histogram/main.nf | 15 +++++ .../cnvpytor/histogram/nextflow.config | 7 +++ tests/modules/cnvpytor/histogram/test.yml | 10 ++++ .../modules/cnvpytor/importreaddepth/main.nf | 32 +++++++++++ .../cnvpytor/importreaddepth/nextflow.config | 12 ++++ tests/modules/cnvpytor/partition/main.nf | 15 +++++ .../cnvpytor/partition/nextflow.config | 7 +++ tests/modules/cnvpytor/partition/test.yml | 10 ++++ 20 files changed, 456 insertions(+) create mode 100644 modules/cnvpytor/callcnvs/main.nf create mode 100644 modules/cnvpytor/callcnvs/meta.yml create mode 100644 modules/cnvpytor/histogram/main.nf create mode 100644 modules/cnvpytor/histogram/meta.yml create mode 100644 modules/cnvpytor/importreaddepth/main.nf create mode 100644 modules/cnvpytor/importreaddepth/meta.yml create mode 100644 modules/cnvpytor/partition/main.nf create mode 100644 modules/cnvpytor/partition/meta.yml create mode 100644 tests/modules/cnvpytor/callcnvs/main.nf create mode 100644 tests/modules/cnvpytor/callcnvs/nextflow.config create mode 100644 tests/modules/cnvpytor/callcnvs/test.yml create mode 100644 tests/modules/cnvpytor/histogram/main.nf create mode 100644 tests/modules/cnvpytor/histogram/nextflow.config create mode 100644 tests/modules/cnvpytor/histogram/test.yml create mode 100644 tests/modules/cnvpytor/importreaddepth/main.nf create mode 100644 tests/modules/cnvpytor/importreaddepth/nextflow.config create mode 100644 tests/modules/cnvpytor/partition/main.nf create mode 100644 tests/modules/cnvpytor/partition/nextflow.config create mode 100644 tests/modules/cnvpytor/partition/test.yml diff --git a/modules/cnvpytor/callcnvs/main.nf b/modules/cnvpytor/callcnvs/main.nf new file mode 100644 index 00000000..1d47ce16 --- /dev/null +++ b/modules/cnvpytor/callcnvs/main.nf @@ -0,0 +1,33 @@ +process CNVPYTOR_CALLCNVS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" + + input: + tuple val(meta), path(pytor) + + output: + tuple val(meta), path("*.tsv"), emit: cnvs + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '1000' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + cnvpytor \\ + -root $pytor \\ + -call $args > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ +} diff --git a/modules/cnvpytor/callcnvs/meta.yml b/modules/cnvpytor/callcnvs/meta.yml new file mode 100644 index 00000000..c153b949 --- /dev/null +++ b/modules/cnvpytor/callcnvs/meta.yml @@ -0,0 +1,40 @@ +name: cnvpytor_callcnvs +description: command line tool for calling CNVs in whole genome sequencing data + - CNV calling +tools: + - cnvpytor: + description: calling CNVs using read depth + homepage: https://github.com/abyzovlab/CNVpytor + documentation: https://github.com/abyzovlab/CNVpytor + tool_dev_url: https://github.com/abyzovlab/CNVpytor + doi: "10.1101/2021.01.27.428472v1" + licence: ['MIT'] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - pytor: + type: file + description: cnvpytor root file + pattern: "*.{pytor}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - cnvs: + type: file + description: file containing identified copy numer variations + pattern: "*.{tsv}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + +authors: + - "@sima-r" diff --git a/modules/cnvpytor/histogram/main.nf b/modules/cnvpytor/histogram/main.nf new file mode 100644 index 00000000..29dc1bff --- /dev/null +++ b/modules/cnvpytor/histogram/main.nf @@ -0,0 +1,32 @@ +process CNVPYTOR_HISTOGRAM { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" + + input: + tuple val(meta), path(pytor) + + output: + tuple val(meta), path("${pytor.baseName}.pytor") , emit: pytor + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '1000' + """ + cnvpytor \\ + -root $pytor \\ + -his $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ +} diff --git a/modules/cnvpytor/histogram/meta.yml b/modules/cnvpytor/histogram/meta.yml new file mode 100644 index 00000000..8484ddd4 --- /dev/null +++ b/modules/cnvpytor/histogram/meta.yml @@ -0,0 +1,42 @@ +name: cnvpytor_histogram +description: calculates read depth histograms +keywords: + - cnv calling + - histogram +tools: + - cnvpytor: + description: calling CNVs using read depth + homepage: https://github.com/abyzovlab/CNVpytor + documentation: https://github.com/abyzovlab/CNVpytor + tool_dev_url: https://github.com/abyzovlab/CNVpytor + doi: "10.1101/2021.01.27.428472v1" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - pytor: + type: file + description: pytor file containing read depth data + pattern: "*.{pytor}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - pytor: + type: file + description: pytor file containing read depth histograms binned based on given bin size(s) + pattern: "*.{pytor}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sima-r" diff --git a/modules/cnvpytor/importreaddepth/main.nf b/modules/cnvpytor/importreaddepth/main.nf new file mode 100644 index 00000000..9fc7db08 --- /dev/null +++ b/modules/cnvpytor/importreaddepth/main.nf @@ -0,0 +1,38 @@ +process CNVPYTOR_IMPORTREADDEPTH { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" + + input: + tuple val(meta), path(input_file), path(index) + path fasta + path fai + + output: + tuple val(meta), path("*.pytor") , emit: pytor + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def reference = fasta ? "-T ${fasta}" : '' + """ + cnvpytor \\ + -root ${prefix}.pytor \\ + -rd $input_file \\ + $args \\ + $reference + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ +} diff --git a/modules/cnvpytor/importreaddepth/meta.yml b/modules/cnvpytor/importreaddepth/meta.yml new file mode 100644 index 00000000..908c3a74 --- /dev/null +++ b/modules/cnvpytor/importreaddepth/meta.yml @@ -0,0 +1,55 @@ +name: cnvpytor_importreaddepth +description: command line tool for CNV/CNA analysis. This step imports the read depth data into a root pytor file. 
+keywords: + - read depth + - cnv calling +tools: + - cnvpytor -rd: + description: calling CNVs using read depth + homepage: https://github.com/abyzovlab/CNVpytor + documentation: https://github.com/abyzovlab/CNVpytor + tool_dev_url: https://github.com/abyzovlab/CNVpytor + doi: "10.1101/2021.01.27.428472v1" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - input_file: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram}" + - index: + type: file + description: bam file index + pattern: "*.{bai,crai}" + - fasta: + type: file + description: specifies reference genome file (only for cram file without reference genome) + pattern: "*.{fasta,fasta.gz,fa,fa.gz}" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fai" + + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - pytor: + type: file + description: read depth root file in which read depth data binned to 100 base pair bins will be stored. + pattern: "*.{pytor}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sima-r" diff --git a/modules/cnvpytor/partition/main.nf b/modules/cnvpytor/partition/main.nf new file mode 100644 index 00000000..e3f73955 --- /dev/null +++ b/modules/cnvpytor/partition/main.nf @@ -0,0 +1,32 @@ +process CNVPYTOR_PARTITION { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" + + input: + tuple val(meta), path(pytor) + + output: + tuple val(meta), path("${pytor.baseName}.pytor"), emit: pytor + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '1000' + """ + cnvpytor \\ + -root $pytor \\ + -partition $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ +} diff --git a/modules/cnvpytor/partition/meta.yml b/modules/cnvpytor/partition/meta.yml new file mode 100644 index 00000000..3f0a3e21 --- /dev/null +++ b/modules/cnvpytor/partition/meta.yml @@ -0,0 +1,42 @@ +name: cnvpytor_partition +description: partitioning read depth histograms +keywords: + - cnv calling + - partition histograms +tools: + - cnvpytor: + description: calling CNVs using read depth + homepage: https://github.com/abyzovlab/CNVpytor + documentation: https://github.com/abyzovlab/CNVpytor + tool_dev_url: https://github.com/abyzovlab/CNVpytor + doi: "10.1101/2021.01.27.428472v1" + licence: ['MIT'] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test' ] + - pytor: + type: file + description: pytor file containing read depth data + pattern: "*.{pytor}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - partitions: + type: file + description: pytor file containing partitions of read depth histograms using mean-shift method + pattern: "*.{pytor}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@sima-r" diff --git a/tests/config/test_data.config b/tests/config/test_data.config index ce4f7ae8..ed1e2861 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -295,6 +295,8 @@ params { test_rnaseq_vcf = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/test.rnaseq.vcf" test_sv_vcf = "${test_data_dir}/genomics/homo_sapiens/illumina/vcf/sv_query.vcf.gz" + + test_pytor = "${test_data_dir}/genomics/homo_sapiens/illumina/pytor/test.pytor" } 'pacbio' { primers = "${test_data_dir}/genomics/homo_sapiens/pacbio/fasta/primers.fasta" diff --git a/tests/modules/cnvpytor/callcnvs/main.nf b/tests/modules/cnvpytor/callcnvs/main.nf new file mode 100644 index 00000000..96d33250 --- /dev/null +++ b/tests/modules/cnvpytor/callcnvs/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVPYTOR_CALLCNVS } from '../../../../modules/cnvpytor/callcnvs/main.nf' + +workflow test_cnvpytor_callcnvs { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true) + ] + + CNVPYTOR_CALLCNVS ( input ) +} diff --git a/tests/modules/cnvpytor/callcnvs/nextflow.config b/tests/modules/cnvpytor/callcnvs/nextflow.config new file mode 100644 index 00000000..9d132b77 --- /dev/null +++ b/tests/modules/cnvpytor/callcnvs/nextflow.config @@ -0,0 +1,7 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: CNVPYTOR_CALLCNVS { + ext.args = '10000' + } +} diff --git a/tests/modules/cnvpytor/callcnvs/test.yml b/tests/modules/cnvpytor/callcnvs/test.yml new file mode 100644 index 00000000..85bfcc7a --- /dev/null +++ b/tests/modules/cnvpytor/callcnvs/test.yml @@ -0,0 +1,10 @@ +- name: cnvpytor callcnvs test_cnvpytor_callcnvs + command: nextflow run tests/modules/cnvpytor/callcnvs -entry test_cnvpytor_callcnvs -c tests/config/nextflow.config + tags: + - cnvpytor + - cnvpytor/callcnvs + files: + - path: output/cnvpytor/calls.10000.tsv + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/cnvpytor/versions.yml + md5sum: 5fe6ca3ef5c40f9dbf487f28db237821 diff --git a/tests/modules/cnvpytor/histogram/main.nf b/tests/modules/cnvpytor/histogram/main.nf new file mode 100644 index 00000000..2178dee1 --- /dev/null +++ b/tests/modules/cnvpytor/histogram/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVPYTOR_HISTOGRAM } from '../../../../modules/cnvpytor/histogram/main.nf' + +workflow test_cnvpytor_histogram { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true), + ] + + CNVPYTOR_HISTOGRAM ( input ) +} diff --git a/tests/modules/cnvpytor/histogram/nextflow.config b/tests/modules/cnvpytor/histogram/nextflow.config new file mode 100644 index 00000000..de8cfccf --- /dev/null +++ b/tests/modules/cnvpytor/histogram/nextflow.config @@ -0,0 +1,7 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: CNVPYTOR_HISTOGRAM { + ext.args = '10000 100000' + } +} diff --git a/tests/modules/cnvpytor/histogram/test.yml b/tests/modules/cnvpytor/histogram/test.yml new file mode 100644 
index 00000000..fd8bcaf4 --- /dev/null +++ b/tests/modules/cnvpytor/histogram/test.yml @@ -0,0 +1,10 @@ +- name: cnvpytor histogram test_cnvpytor_histogram + command: nextflow run tests/modules/cnvpytor/histogram -entry test_cnvpytor_histogram -c tests/config/nextflow.config + tags: + - cnvpytor + - cnvpytor/histogram + files: + - path: output/cnvpytor/test.pytor + md5sum: aa03a8fa15b39f77816705a48e10312a + - path: output/cnvpytor/versions.yml + md5sum: 9a4b176afd5f1a3edeb37eeb301cf464 diff --git a/tests/modules/cnvpytor/importreaddepth/main.nf b/tests/modules/cnvpytor/importreaddepth/main.nf new file mode 100644 index 00000000..0d9be324 --- /dev/null +++ b/tests/modules/cnvpytor/importreaddepth/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVPYTOR_IMPORTREADDEPTH } from '../../../../modules/cnvpytor/importreaddepth/main.nf' + + +workflow test_cnvpytor_importreaddepth { + + input = [ + [ id: 'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) + ] + + CNVPYTOR_IMPORTREADDEPTH (input, [], []) +} + +workflow test_cnvpytor_importreaddepth_cram { + + input = [ + [ id: 'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true) + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + + CNVPYTOR_IMPORTREADDEPTH (input, fasta, fai) +} diff --git a/tests/modules/cnvpytor/importreaddepth/nextflow.config b/tests/modules/cnvpytor/importreaddepth/nextflow.config new file mode 100644 index 00000000..c60f979e --- /dev/null +++ b/tests/modules/cnvpytor/importreaddepth/nextflow.config @@ -0,0 +1,12 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: CNVPYTOR_IMPORTREADDEPTH { + ext.args = {params.cnvpytor_chr ? "-chrom ${params.cnvpytor_chr}" : '' } + } +} + +params { + cnvpytor_chr = '' // specifies chromosome name(s) the same way as they are described in the sam/bam/cram header e.g. '1 2' or 'chr1 chr2'. 
+ } diff --git a/tests/modules/cnvpytor/partition/main.nf b/tests/modules/cnvpytor/partition/main.nf new file mode 100644 index 00000000..cd8063a4 --- /dev/null +++ b/tests/modules/cnvpytor/partition/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CNVPYTOR_PARTITION } from '../../../../modules/cnvpytor/partition/main.nf' + +workflow test_cnvpytor_partition { + + input = [ + [ id:'test'], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_pytor'], checkIfExists: true) + ] + + CNVPYTOR_PARTITION ( input ) +} diff --git a/tests/modules/cnvpytor/partition/nextflow.config b/tests/modules/cnvpytor/partition/nextflow.config new file mode 100644 index 00000000..b684d42a --- /dev/null +++ b/tests/modules/cnvpytor/partition/nextflow.config @@ -0,0 +1,7 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: CNVPYTOR_PARTITION { + ext.args = '10000 100000' + } +} diff --git a/tests/modules/cnvpytor/partition/test.yml b/tests/modules/cnvpytor/partition/test.yml new file mode 100644 index 00000000..1b838395 --- /dev/null +++ b/tests/modules/cnvpytor/partition/test.yml @@ -0,0 +1,10 @@ +- name: cnvpytor partition test_cnvpytor_partition + command: nextflow run tests/modules/cnvpytor/partition -entry test_cnvpytor_partition -c tests/config/nextflow.config + tags: + - cnvpytor + - cnvpytor/partition + files: + - path: output/cnvpytor/test.pytor + md5sum: aa03a8fa15b39f77816705a48e10312a + - path: output/cnvpytor/versions.yml + md5sum: 8a04506554c58cd170cc050fd9904c6f From d6244b42f596fa26d2ecba4ce862755821ed9da8 Mon Sep 17 00:00:00 2001 From: Lasse Folkersen Date: Tue, 15 Mar 2022 11:18:43 +0100 Subject: [PATCH 037/283] ASCAT (#1332) * First commit * putting correct links for singularity and docker containers (just had to search for bioconda+ascat to find them, and then put them in like the rest of the nf-core tools had it * adding first try of relevant commands (not working yet, just took their basic pipeline example * test commit * remove test * starting up work with module after 3.0.0 upgrade * add ascat.prepareHTS statemet * add location of docker for new mulled alleleCounter+ASCAT container * first full run with ASCAT on HG00154.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam * add notes on dropbox download * use a newer pytest_modules.yml * add outpit * trying to align with current Sarek output * adding in FH comments * busy clearing up arguments and testing. Still WIP * first working run, in nextflow, with sarek-like output. 
Still needs more work on input arguments * cleaning up before writing up findings * testing with putting in arguments in args * draft for solution 3 style for arguments * one more test added * adding FH map * finished testing maps for args * wrap-up cram/crai test successfully * updates to address ability to put in ref.fasta argument for cram running * adding remaining import-HTS commands in as args, and removing the chr21/chr22 only testing to test-nextflow.config * first test with auto-downloading the s3-data (when not given as an argument) * removing download-logic for supporting files, documenting in meta.yml, fixing ref_fasta bug * adding mulled singularity container * removing tests * fix left padding lint issue * lint failure in meta.yml * more linting errors * add when argument * adding stub functionality * add stub run * correct md5sum for versions.yml * more testing with -runstub * stub code in pure bash - not mixed with R * reformat version.yml * get rid of absolute paths in test.yml * correct wrong md5sum * adding allelecount conda link * rename normal_bam to input_bam etc * let the pipeline dev worry about matching the right loci and allele files * dont hardcode default genomebuild * adding download instruction comment * add doi * fix conda addition bug * add args documentation * test new indent * new test with meta.yml indentation * retry with new meta.yml * retry with new meta.yml - now with empty lines around * retry with new meta.yml - remove trailing whitepsace * trying to fix found quote character that cannot start any token error * try with one empty line above triple-quote and no empty line below * trying with pipe character * checking if its the ending triple quote * one more try with meta.yml * test update bioconda versioning for linting failure * test update bioconda versioning for linting failure 2 * testing allelecounter version error on conda Co-authored-by: @lassefolkersen Co-authored-by: @FriederikeHanssen --- modules/ascat/main.nf | 155 ++++++++++++++++++++++++++++ modules/ascat/meta.yml | 92 +++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/modules/ascat/main.nf | 64 ++++++++++++ tests/modules/ascat/nextflow.config | 39 +++++++ tests/modules/ascat/test.yml | 25 +++++ 6 files changed, 379 insertions(+) create mode 100644 modules/ascat/main.nf create mode 100644 modules/ascat/meta.yml create mode 100644 tests/modules/ascat/main.nf create mode 100644 tests/modules/ascat/nextflow.config create mode 100644 tests/modules/ascat/test.yml diff --git a/modules/ascat/main.nf b/modules/ascat/main.nf new file mode 100644 index 00000000..1d2bd96f --- /dev/null +++ b/modules/ascat/main.nf @@ -0,0 +1,155 @@ +process ASCAT { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::ascat=3.0.0 bioconda::cancerit-allelecount-4.3.0": null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-c278c7398beb73294d78639a864352abef2931ce:dfe5aaa885de434adb2b490b68972c5840c6d761-0': + 'quay.io/biocontainers/mulled-v2-c278c7398beb73294d78639a864352abef2931ce:dfe5aaa885de434adb2b490b68972c5840c6d761-0' }" + + input: + tuple val(meta), path(input_normal), path(index_normal), path(input_tumor), path(index_tumor) + path(allele_files) + path(loci_files) + + output: + tuple val(meta), path("*png"), emit: png + tuple val(meta), path("*cnvs.txt"), emit: cnvs + tuple val(meta), path("*purityploidy.txt"), emit: purityploidy + tuple val(meta), path("*segments.txt"), emit: segments + path "versions.yml", emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def gender = args.gender ? "$args.gender" : "NULL" + def genomeVersion = args.genomeVersion ? "$args.genomeVersion" : "NULL" + def purity = args.purity ? "$args.purity" : "NULL" + def ploidy = args.ploidy ? "$args.ploidy" : "NULL" + def gc_files = args.gc_files ? "$args.gc_files" : "NULL" + + def minCounts_arg = args.minCounts ? ",minCounts = $args.minCounts" : "" + def chrom_names_arg = args.chrom_names ? ",chrom_names = $args.chrom_names" : "" + def min_base_qual_arg = args.min_base_qual ? ",min_base_qual = $args.min_base_qual" : "" + def min_map_qual_arg = args.min_map_qual ? ",min_map_qual = $args.min_map_qual" : "" + def ref_fasta_arg = args.ref_fasta ? ",ref.fasta = '$args.ref_fasta'" : "" + def skip_allele_counting_tumour_arg = args.skip_allele_counting_tumour ? ",skip_allele_counting_tumour = $args.skip_allele_counting_tumour" : "" + def skip_allele_counting_normal_arg = args.skip_allele_counting_normal ? ",skip_allele_counting_normal = $args.skip_allele_counting_normal" : "" + + + + """ + #!/usr/bin/env Rscript + library(RColorBrewer) + library(ASCAT) + options(bitmapType='cairo') + + + #prepare from BAM files + ascat.prepareHTS( + tumourseqfile = "$input_tumor", + normalseqfile = "$input_normal", + tumourname = "Tumour", + normalname = "Normal", + allelecounter_exe = "alleleCounter", + alleles.prefix = "$allele_files", + loci.prefix = "$loci_files", + gender = "$gender", + genomeVersion = "$genomeVersion", + nthreads = $task.cpus + $minCounts_arg + $chrom_names_arg + $min_base_qual_arg + $min_map_qual_arg + $ref_fasta_arg + $skip_allele_counting_tumour_arg + $skip_allele_counting_normal_arg + ) + + + #Load the data + ascat.bc = ascat.loadData( + Tumor_LogR_file = "Tumour_tumourLogR.txt", + Tumor_BAF_file = "Tumour_normalBAF.txt", + Germline_LogR_file = "Tumour_normalLogR.txt", + Germline_BAF_file = "Tumour_normalBAF.txt", + genomeVersion = "$genomeVersion", + gender = "$gender" + ) + + #optional GC wave correction + if(!is.null($gc_files)){ + ascat.bc = ascat.GCcorrect(ascat.bc, $gc_files) + } + + #Plot the raw data + ascat.plotRawData(ascat.bc) + + #Segment the data + ascat.bc = ascat.aspcf(ascat.bc) + + #Plot the segmented data + ascat.plotSegmentedData(ascat.bc) + + #Run ASCAT to fit every tumor to a model, inferring ploidy, normal cell contamination, and discrete copy numbers + #If psi and rho are manually set: + if (!is.null($purity) && !is.null($ploidy)){ + ascat.output <- ascat.runAscat(ascat.bc, gamma=1, rho_manual=$purity, psi_manual=$ploidy) + } else if(!is.null($purity) && is.null($ploidy)){ + ascat.output <- ascat.runAscat(ascat.bc, gamma=1, rho_manual=$purity) + } else if(!is.null($ploidy) && is.null($purity)){ + ascat.output <- ascat.runAscat(ascat.bc, 
gamma=1, psi_manual=$ploidy) + } else { + ascat.output <- ascat.runAscat(ascat.bc, gamma=1) + } + + #Write out segmented regions (including regions with one copy of each allele) + write.table(ascat.output[["segments"]], file=paste0("$prefix", ".segments.txt"), sep="\t", quote=F, row.names=F) + + #Write out CNVs in bed format + cnvs=ascat.output[["segments"]][2:6] + write.table(cnvs, file=paste0("$prefix",".cnvs.txt"), sep="\t", quote=F, row.names=F, col.names=T) + + #Write out purity and ploidy info + summary <- tryCatch({ + matrix(c(ascat.output[["aberrantcellfraction"]], ascat.output[["ploidy"]]), ncol=2, byrow=TRUE)}, error = function(err) { + # error handler picks up where error was generated + print(paste("Could not find optimal solution: ",err)) + return(matrix(c(0,0),nrow=1,ncol=2,byrow = TRUE)) + } + ) + colnames(summary) <- c("AberrantCellFraction","Ploidy") + write.table(summary, file=paste0("$prefix",".purityploidy.txt"), sep="\t", quote=F, row.names=F, col.names=T) + + #version export. Have to hardcode process name and software name because + #won't run inside an R-block + version_file_path="versions.yml" + f <- file(version_file_path,"w") + writeLines("ASCAT:", f) + writeLines(" ascat: 3.0.0",f) + close(f) + """ + + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.cnvs.txt + touch ${prefix}.purityploidy.txt + touch ${prefix}.segments.txt + touch Tumour.ASCATprofile.png + touch Tumour.ASPCF.png + touch Tumour.germline.png + touch Tumour.rawprofile.png + touch Tumour.sunrise.png + touch Tumour.tumour.png + + echo 'ASCAT:' > versions.yml + echo ' ascat: 3.0.0' >> versions.yml + """ + + +} diff --git a/modules/ascat/meta.yml b/modules/ascat/meta.yml new file mode 100644 index 00000000..949afd6a --- /dev/null +++ b/modules/ascat/meta.yml @@ -0,0 +1,92 @@ +name: ascat +description: copy number profiles of tumour cells. +keywords: + - sort +tools: + - ascat: + description: ASCAT is a method to derive copy number profiles of tumour cells, accounting for normal cell admixture and tumour aneuploidy. ASCAT infers tumour purity (the fraction of tumour cells) and ploidy (the amount of DNA per tumour cell), expressed as multiples of haploid genomes from SNP array or massively parallel sequencing data, and calculates whole-genome allele-specific copy number profiles (the number of copies of both parental alleles for all SNP loci across the genome). + homepage: None + documentation: None + tool_dev_url: https://github.com/Crick-CancerGenomics/ascat + doi: "10.1093/bioinformatics/btaa538" + licence: ['GPL v3'] + +input: + - args: + type: map + description: | + Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. Parameters must be set between quotes. parameters can be removed from the map, if they are not set. For default values, please check the documentation above. + + ``` + { + [ + "gender": "XX", + "genomeVersion": "hg19" + "purity": , + "ploidy": , + "gc_files": , + "minCounts": , + "chrom_names": , + "min_base_qual": , + "min_map_qual": , + "ref_fasta": , + "skip_allele_counting_tumour": , + "skip_allele_counting_normal": + ] + } + ``` + + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - input_normal: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - index_normal: + type: file + description: index for normal_bam + pattern: "*.{bai}" + - input_tumor: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - index_tumor: + type: file + description: index for tumor_bam + pattern: "*.{bai}" + - allele_files: + type: file + description: allele files for ASCAT. Can be downloaded here https://github.com/VanLoo-lab/ascat/tree/master/ReferenceFiles/WGS + - loci_files: + type: file + description: loci files for ASCAT. Can be downloaded here https://github.com/VanLoo-lab/ascat/tree/master/ReferenceFiles/WGS +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - png: + type: file + description: ASCAT plots + pattern: "*.{png}" + - purityploidy: + type: file + description: purity and ploidy data + pattern: "*.purityploidy.txt" + - segments: + type: file + description: segments data + pattern: "*.segments.txt" +authors: + - "@aasNGC" + - "@lassefolkersen" + - "@FriederikeHanssen" + - "@maxulysse" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6ecab096..48c3bb7d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -46,6 +46,10 @@ artic/minion: - modules/artic/minion/** - tests/modules/artic/minion/** +ascat: + - modules/ascat/** + - tests/modules/ascat/** + assemblyscan: - modules/assemblyscan/** - tests/modules/assemblyscan/** diff --git a/tests/modules/ascat/main.nf b/tests/modules/ascat/main.nf new file mode 100644 index 00000000..e1f4f798 --- /dev/null +++ b/tests/modules/ascat/main.nf @@ -0,0 +1,64 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ASCAT as ASCAT_SIMPLE} from '../../../modules/ascat/main.nf' +include { ASCAT as ASCAT_PLOIDY_AND_PURITY} from '../../../modules/ascat/main.nf' +include { ASCAT as ASCAT_CRAM} from '../../../modules/ascat/main.nf' + + + + +workflow test_ascat { + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam_bai'], checkIfExists: true) + ] + + ASCAT_SIMPLE ( input , [], []) +} + + + + + +// extended tests running with 1000 genomes data. 
Data is downloaded as follows: +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase1/data/HG00154/alignment/HG00154.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase1/data/HG00154/alignment/HG00154.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam.bai +// wget http://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase1/data/HG00155/alignment/HG00155.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam +// wget http://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase1/data/HG00155/alignment/HG00155.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam.bai +//workflow test_ascat_with_ploidy_and_purity { +// input = [ +// [ id:'test', single_end:false ], // meta map +// file("/home/ec2-user/input_files/bams/HG00154.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam", checkIfExists: true), +// file("/home/ec2-user/input_files/bams/HG00154.mapped.ILLUMINA.bwa.GBR.low_coverage.20101123.bam.bai", checkIfExists: true), +// file("/home/ec2-user/input_files/bams/test2.bam", checkIfExists: true), +// file("/home/ec2-user/input_files/bams/test2.bam.bai", checkIfExists: true) +// ] +// +// ASCAT_PLOIDY_AND_PURITY ( input , "/home/ec2-user/input_files/allele_files/G1000_alleles_hg19_chr", "/home/ec2-user/input_files/loci_files/G1000_alleles_hg19_chr") +//} + + +// extended tests running with 1000 genomes data. Data is downloaded as follows: +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/HG00145/alignment/HG00145.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram.crai +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/HG00145/alignment/HG00145.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/HG00146/alignment/HG00146.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram.crai +// wget ftp://ftp.1000genomes.ebi.ac.uk/vol1/ftp/phase3/data/HG00146/alignment/HG00146.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram +//workflow test_ascat_with_crams { +// input = [ +// [ id:'test', single_end:false ], // meta map +// file("/home/ec2-user/input_files/crams/HG00145.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram", checkIfExists: true), +// file("/home/ec2-user/input_files/crams/HG00145.mapped.ILLUMINA.bwa.GBR.low_coverage.20120522.bam.cram.crai", checkIfExists: true), +// file("/home/ec2-user/input_files/crams/duplicate_test.cram", checkIfExists: true), +// file("/home/ec2-user/input_files/crams/duplicate_test.cram.crai", checkIfExists: true) +// ] +// +// ASCAT_CRAM ( input , "/home/ec2-user/input_files/allele_files/G1000_alleles_hg19_chr", "/home/ec2-user/input_files/loci_files/G1000_alleles_hg19_chr") +//} + + + diff --git a/tests/modules/ascat/nextflow.config b/tests/modules/ascat/nextflow.config new file mode 100644 index 00000000..3c6cc53a --- /dev/null +++ b/tests/modules/ascat/nextflow.config @@ -0,0 +1,39 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + + withName: ASCAT_SIMPLE { + ext.args = [ + gender : 'XY', + genomeVersion : 'hg19', + minCounts : '1', + min_base_qual : '1', + min_map_qual : '1', + chrom_names : 'c("21","22")' + ] + } + + + + withName: ASCAT_PLOIDY_AND_PURITY { + ext.args = [ + gender : 'XX', + genomeVersion : 'hg19', + ploidy : '1.7', + purity : '0.24', + chrom_names : 'c("21","22")' + ] + } + + withName: ASCAT_CRAM { + ext.args = [ + gender : 'XX', + genomeVersion : 'hg19', + ref_fasta : '/home/ec2-user/input_files/fasta/human_g1k_v37.fasta', + chrom_names : 
'c("21","22")' + ] + } + +} + diff --git a/tests/modules/ascat/test.yml b/tests/modules/ascat/test.yml new file mode 100644 index 00000000..e46c66b4 --- /dev/null +++ b/tests/modules/ascat/test.yml @@ -0,0 +1,25 @@ +- name: ascat test_ascat + command: nextflow run tests/modules/ascat -entry test_ascat -c tests/config/nextflow.config -stub-run + tags: + - ascat + files: + - path: output/ascat/Tumour.ASCATprofile.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/Tumour.ASPCF.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/Tumour.germline.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/Tumour.rawprofile.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/Tumour.sunrise.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/Tumour.tumour.png + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/test.cnvs.txt + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/test.purityploidy.txt + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/test.segments.txt + md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/ascat/versions.yml + md5sum: 1af20694ec11004c4f8bc0c609b06386 From 18e72fa0be121e8a772a84c2d17cbfb0f49ff914 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Tue, 15 Mar 2022 11:56:42 +0100 Subject: [PATCH 038/283] fix indent error, avoiding html tag confusion (#1393) --- modules/controlfreec/meta.yml | 98 +++++++++++++++++------------------ 1 file changed, 49 insertions(+), 49 deletions(-) diff --git a/modules/controlfreec/meta.yml b/modules/controlfreec/meta.yml index 4d1e8674..3e218037 100644 --- a/modules/controlfreec/meta.yml +++ b/modules/controlfreec/meta.yml @@ -17,57 +17,57 @@ tools: input: - args: - type: map - description: | - Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. - parameters can be removed from the map, if they are not set. All value must be surrounded by quotes, meta map parameters can be set with, i.e. sex = meta.sex: - For default values, please check the documentation above. + type: map + description: | + Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. + Parameters marked as (optional) can be removed from the map, if they are not set. All values must be surrounded by quotes, meta map parameters can be set with, i.e. `sex = meta.sex`: + For default values, please check the documentation above. 
- ``` - { - [ - "general" :[ - "bedgraphoutput": , - "breakpointthreshold": , - "breakpointtype": , - "coefficientofvariation": , - "contamination": , - "contaminationadjustment": , - "degree": , - "forcegccontentnormalization": , - "gccontentprofile": , - "intercept": , - "mincnalength": , - "minmappabilityperwindow": , - "minexpectedgc": , - "maxexpectedgc": , - "minimalsubclonepresence": , - "noisydata": , - "ploidy": , - "printNA": , - "readcountthreshold": , - "sex": , - "step": , - "telocentromeric": , - "uniquematch": , - "window": - ], - "control":[ - "inputformat": , - "mateorientation": , - ], - "sample":[ - "inputformat": , - "mateorientation": , - ], - "BAF":[ - "minimalcoverageperposition": , - "minimalqualityperposition": , - "shiftinquality": + ``` + { + [ + "general" :[ + "bedgraphoutput": (optional), + "breakpointthreshold": (optional), + "breakpointtype": (optional), + "coefficientofvariation": (optional), + "contamination": (optional), + "contaminationadjustment": (optional), + "degree": (optional), + "forcegccontentnormalization": (optional), + "gccontentprofile": (optional), + "intercept": (optional), + "mincnalength": (optional), + "minmappabilityperwindow": (optional), + "minexpectedgc": (optional), + "maxexpectedgc": (optional), + "minimalsubclonepresence": (optional), + "noisydata": (optional), + "ploidy": (optional), + "printNA": (optional), + "readcountthreshold": (optional), + "sex": (optional), + "step": (optional), + "telocentromeric": (optional), + "uniquematch": (optional), + "window": (optional) + ], + "control":[ + "inputformat": (required), + "mateorientation": (optional), + ], + "sample":[ + "inputformat": (required), + "mateorientation": (optional), + ], + "BAF":[ + "minimalcoverageperposition": (optional), + "minimalqualityperposition": (optional), + "shiftinquality": (optional) + ] ] - ] - } - ``` + } + ``` - meta: type: map From 73aaecbc3ae62a124e1148b68b1a69dcfd9db0ed Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Tue, 15 Mar 2022 11:02:40 -0500 Subject: [PATCH 039/283] Add BAM indexes as input for optitype module. 
(#1388) --- modules/optitype/main.nf | 2 +- modules/optitype/meta.yml | 4 ++++ tests/config/test_data.config | 3 ++- tests/modules/optitype/main.nf | 3 ++- tests/modules/optitype/test.yml | 2 +- 5 files changed, 10 insertions(+), 4 deletions(-) diff --git a/modules/optitype/main.nf b/modules/optitype/main.nf index 984d4669..b6a3e200 100644 --- a/modules/optitype/main.nf +++ b/modules/optitype/main.nf @@ -8,7 +8,7 @@ process OPTITYPE { 'quay.io/biocontainers/optitype:1.3.5--0' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(bai) output: tuple val(meta), path("${prefix}"), emit: output diff --git a/modules/optitype/meta.yml b/modules/optitype/meta.yml index 4cacadda..9ca10774 100644 --- a/modules/optitype/meta.yml +++ b/modules/optitype/meta.yml @@ -22,6 +22,10 @@ input: type: file description: BAM file pattern: "*.{bam}" + - bai: + type: file + description: BAM index file + pattern: "*.{bai}" output: - meta: diff --git a/tests/config/test_data.config b/tests/config/test_data.config index ed1e2861..0d61d1e9 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -192,7 +192,8 @@ params { test_paired_end_umi_unsorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.umi_unsorted.bam" test_paired_end_umi_unsorted_tagged_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/umi/test.paired_end.unsorted_tagged.bam" test_paired_end_hla = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/example_hla_pe.bam" - + test_paired_end_hla_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/example_hla_pe.sorted.bam" + test_paired_end_hla_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/example_hla_pe.sorted.bam.bai" test2_paired_end_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam" test2_paired_end_sorted_bam_bai = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.sorted.bam.bai" test2_paired_end_name_sorted_bam = "${test_data_dir}/genomics/homo_sapiens/illumina/bam/test2.paired_end.name.sorted.bam" diff --git a/tests/modules/optitype/main.nf b/tests/modules/optitype/main.nf index 55b46f0a..6ed14d96 100644 --- a/tests/modules/optitype/main.nf +++ b/tests/modules/optitype/main.nf @@ -6,7 +6,8 @@ include { OPTITYPE } from '../../../modules/optitype/main.nf' workflow test_optitype { input = [ [ id:'test', seq_type:'dna' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_hla'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_hla_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_hla_sorted_bam_bai'], checkIfExists: true) ] OPTITYPE ( input ) diff --git a/tests/modules/optitype/test.yml b/tests/modules/optitype/test.yml index bb493077..0cbb99cb 100644 --- a/tests/modules/optitype/test.yml +++ b/tests/modules/optitype/test.yml @@ -6,4 +6,4 @@ - path: output/optitype/test/test_coverage_plot.pdf - path: output/optitype/test/test_result.tsv contains: - - "1446" + - "1439" From df60a58426705cc9feb6850f69b42f00f383df8c Mon Sep 17 00:00:00 2001 From: Lasse Folkersen Date: Wed, 16 Mar 2022 13:29:11 +0100 Subject: [PATCH 040/283] round the < to ( to make markdown work for meta.yml (#1395) * round the < to ( to make markdown work for meta.yml * changing md5sums and stub output so it doesnt trigger the empty file linting error --- modules/ascat/main.nf | 18 +++++++++--------- modules/ascat/meta.yml | 22 
+++++++++++----------- tests/modules/ascat/test.yml | 18 +++++++++--------- 3 files changed, 29 insertions(+), 29 deletions(-) diff --git a/modules/ascat/main.nf b/modules/ascat/main.nf index 1d2bd96f..35f262dd 100644 --- a/modules/ascat/main.nf +++ b/modules/ascat/main.nf @@ -137,15 +137,15 @@ process ASCAT { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch ${prefix}.cnvs.txt - touch ${prefix}.purityploidy.txt - touch ${prefix}.segments.txt - touch Tumour.ASCATprofile.png - touch Tumour.ASPCF.png - touch Tumour.germline.png - touch Tumour.rawprofile.png - touch Tumour.sunrise.png - touch Tumour.tumour.png + echo stub > ${prefix}.cnvs.txt + echo stub > ${prefix}.purityploidy.txt + echo stub > ${prefix}.segments.txt + echo stub > Tumour.ASCATprofile.png + echo stub > Tumour.ASPCF.png + echo stub > Tumour.germline.png + echo stub > Tumour.rawprofile.png + echo stub > Tumour.sunrise.png + echo stub > Tumour.tumour.png echo 'ASCAT:' > versions.yml echo ' ascat: 3.0.0' >> versions.yml diff --git a/modules/ascat/meta.yml b/modules/ascat/meta.yml index 949afd6a..b44862a1 100644 --- a/modules/ascat/meta.yml +++ b/modules/ascat/meta.yml @@ -15,23 +15,23 @@ input: - args: type: map description: | - Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. Parameters must be set between quotes. parameters can be removed from the map, if they are not set. For default values, please check the documentation above. + Groovy Map containing tool parameters. MUST follow the structure/keywords below and be provided via modules.config. Parameters must be set between quotes. (optional) parameters can be removed from the map, if they are not set. For default values, please check the documentation above. 
``` { [ "gender": "XX", "genomeVersion": "hg19" - "purity": , - "ploidy": , - "gc_files": , - "minCounts": , - "chrom_names": , - "min_base_qual": , - "min_map_qual": , - "ref_fasta": , - "skip_allele_counting_tumour": , - "skip_allele_counting_normal": + "purity": (optional), + "ploidy": (optional), + "gc_files": (optional), + "minCounts": (optional), + "chrom_names": (optional), + "min_base_qual": (optional), + "min_map_qual": (optional), + "ref_fasta": (optional), + "skip_allele_counting_tumour": (optional), + "skip_allele_counting_normal": (optional) ] } ``` diff --git a/tests/modules/ascat/test.yml b/tests/modules/ascat/test.yml index e46c66b4..668e8823 100644 --- a/tests/modules/ascat/test.yml +++ b/tests/modules/ascat/test.yml @@ -4,22 +4,22 @@ - ascat files: - path: output/ascat/Tumour.ASCATprofile.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/Tumour.ASPCF.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/Tumour.germline.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/Tumour.rawprofile.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/Tumour.sunrise.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/Tumour.tumour.png - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/test.cnvs.txt - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/test.purityploidy.txt - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/test.segments.txt - md5sum: d41d8cd98f00b204e9800998ecf8427e + md5sum: f50b84b1db4b83ba62ec1deacc69c260 - path: output/ascat/versions.yml md5sum: 1af20694ec11004c4f8bc0c609b06386 From f469045a95aad1ce911f0e6db7393800535935d2 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Thu, 17 Mar 2022 13:08:51 +0100 Subject: [PATCH 041/283] Fix DeepARG dependency issue with singularity containers (#1397) * fix: remove left-over unnecessary code * Adds --fakeroot option for singularity usage with DeepARG as workaround for broken conda dependency * Update modules/deeparg/downloaddata/main.nf Co-authored-by: Moritz E. Beber * Update modules/deeparg/predict/main.nf Co-authored-by: Moritz E. Beber * Apply suggestions from code review * Update main.nf * Update main.nf * Update main.nf * Update main.nf * Go back to original logic Co-authored-by: Moritz E. Beber --- modules/deeparg/downloaddata/main.nf | 6 ++++++ modules/deeparg/predict/main.nf | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/modules/deeparg/downloaddata/main.nf b/modules/deeparg/downloaddata/main.nf index aacdc778..78208d21 100644 --- a/modules/deeparg/downloaddata/main.nf +++ b/modules/deeparg/downloaddata/main.nf @@ -7,6 +7,12 @@ process DEEPARG_DOWNLOADDATA { container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/deeparg:1.0.2--pyhdfd78af_1' : 'quay.io/biocontainers/deeparg:1.0.2--pyhdfd78af_1' }" + /* + We have to force singularity to run with --fakeroot to allow reading of a problematic file with borked read-write permissions in an upstream dependency (Theano).
+ This flag may not be available on all systems and may be considered a security problem, so please document and/or warn about this in your pipeline! + */ + containerOptions { "${workflow.containerEngine}" == 'singularity' ? '--fakeroot' : '' } + input: diff --git a/modules/deeparg/predict/main.nf b/modules/deeparg/predict/main.nf index 3e478e59..05cee2f8 100644 --- a/modules/deeparg/predict/main.nf +++ b/modules/deeparg/predict/main.nf @@ -8,6 +8,11 @@ process DEEPARG_PREDICT { container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity//deeparg:1.0.2--pyhdfd78af_1' : 'quay.io/biocontainers/deeparg:1.0.2--pyhdfd78af_1' }" + /* + We have to force singularity to run with --fakeroot to allow reading of a problematic file with borked read-write permissions in an upstream dependency (Theano). + This flag may not be available on all systems and may be considered a security problem, so please document and/or warn about this in your pipeline! + */ + containerOptions { "${workflow.containerEngine}" == 'singularity' ? '--fakeroot' : '' } input: tuple val(meta), path(fasta), val(model) From 7436eebf334b104eb4a034487681b45045abbd31 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 17 Mar 2022 20:03:14 -0500 Subject: [PATCH 042/283] Bat test (#1226) * CI: print out pytest logs on failure * ci(pytest-workflow): cat => bat * Introduce bug * Use batcat * Force fancification * ci: Use colored output for pytest * Revert "Introduce bug" This reverts commit bc78e5e8d144a823e70f6c57265ac80db687e5fe. Co-authored-by: Gregor Sturm --- .github/workflows/pytest-workflow.yml | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pytest-workflow.yml b/.github/workflows/pytest-workflow.yml index b2be6aa3..2e4f70f8 100644 --- a/.github/workflows/pytest-workflow.yml +++ b/.github/workflows/pytest-workflow.yml @@ -86,17 +86,13 @@ jobs: # Test the module - name: Run pytest-workflow # only use one thread for pytest-workflow to avoid race condition on conda cache. - run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware + run: TMPDIR=~ PROFILE=${{ matrix.profile }} pytest --tag ${{ matrix.tags }} --symlink --kwdof --git-aware --color=yes - name: Output log on failure if: failure() run: | - echo "======> log.out <=======" - cat /home/runner/pytest_workflow_*/*/log.out - echo - echo - echo "======> log.err <=======" - cat /home/runner/pytest_workflow_*/*/log.err + sudo apt install bat > /dev/null + batcat --decorations=always --color=always /home/runner/pytest_workflow_*/*/log.{out,err} - name: Upload logs on failure if: failure() From 979e57b7ac6a405a395dd7a6dbe1a275c5bc226b Mon Sep 17 00:00:00 2001 From: nickhsmith Date: Fri, 18 Mar 2022 13:30:03 +0100 Subject: [PATCH 043/283] Manta consistency (#1407) * update tests * update * update * make the manta inputs consistent for germline/somatic/tumoronly * match chromosomes to cram file (chr21) * undo genotypegvfs * undo genotypegvfs * update manta input structure to match strelka.
tuple(relevant input), path(fasta), path(fai) * update tests * fix typos * fix typos Co-authored-by: Smith Nicholas --- modules/manta/germline/main.nf | 3 +-- modules/manta/germline/meta.yml | 16 +++++++------- modules/manta/somatic/main.nf | 4 +--- modules/manta/somatic/meta.yml | 16 +++++++------- modules/manta/tumoronly/main.nf | 4 +--- modules/manta/tumoronly/meta.yml | 16 +++++++------- tests/modules/manta/germline/main.nf | 30 ++++++++++++--------------- tests/modules/manta/somatic/main.nf | 29 ++++++++++++++++++++------ tests/modules/manta/tumoronly/main.nf | 23 ++++++++++---------- 9 files changed, 74 insertions(+), 67 deletions(-) diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index 5ddba51b..ef6bd4a3 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -8,10 +8,9 @@ process MANTA_GERMLINE { 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: - tuple val(meta), path(input), path(index) + tuple val(meta), path(input), path(index), path(target_bed), path(target_bed_tbi) path fasta path fasta_fai - tuple path(target_bed), path(target_bed_tbi) output: diff --git a/modules/manta/germline/meta.yml b/modules/manta/germline/meta.yml index d6297ead..b719f075 100644 --- a/modules/manta/germline/meta.yml +++ b/modules/manta/germline/meta.yml @@ -31,14 +31,6 @@ input: type: file description: BAM/CRAM/SAM index file. For joint calling use a list of files. pattern: "*.{bai,crai,sai}" - - fasta: - type: file - description: Genome reference FASTA file - pattern: "*.{fa,fasta}" - - fasta_fai: - type: file - description: Genome reference FASTA index file - pattern: "*.{fa.fai,fasta.fai}" - target_bed: type: file description: BED file containing target regions for variant calling @@ -47,6 +39,14 @@ input: type: file description: Index for BED file containing target regions for variant calling pattern: "*.{bed.tbi}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fasta_fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" output: - meta: diff --git a/modules/manta/somatic/main.nf b/modules/manta/somatic/main.nf index 886a8fb9..6313c38b 100644 --- a/modules/manta/somatic/main.nf +++ b/modules/manta/somatic/main.nf @@ -8,11 +8,9 @@ process MANTA_SOMATIC { 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: - tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor) + tuple val(meta), path(input_normal), path(input_index_normal), path(input_tumor), path(input_index_tumor), path(target_bed), path(target_bed_tbi) path fasta path fai - path target_bed - path target_bed_tbi output: tuple val(meta), path("*.candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf diff --git a/modules/manta/somatic/meta.yml b/modules/manta/somatic/meta.yml index ec9cc869..457d66a5 100644 --- a/modules/manta/somatic/meta.yml +++ b/modules/manta/somatic/meta.yml @@ -39,14 +39,6 @@ input: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" - - fasta: - type: file - description: Genome reference FASTA file - pattern: "*.{fa,fasta}" - - fai: - type: file - description: Genome reference FASTA index file - pattern: "*.{fa.fai,fasta.fai}" - target_bed: type: file description: BED file containing target regions for variant calling @@ -55,6 +47,14 @@ input: type: file description: Index for BED file containing target regions for variant calling pattern: "*.{bed.tbi}" + - fasta: + type: 
file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" output: - meta: diff --git a/modules/manta/tumoronly/main.nf b/modules/manta/tumoronly/main.nf index 3d52b16e..3361cf82 100644 --- a/modules/manta/tumoronly/main.nf +++ b/modules/manta/tumoronly/main.nf @@ -8,11 +8,9 @@ process MANTA_TUMORONLY { 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: - tuple val(meta), path(input), path(input_index) + tuple val(meta), path(input), path(input_index), path(target_bed), path(target_bed_tbi) path fasta path fai - path target_bed - path target_bed_tbi output: tuple val(meta), path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf diff --git a/modules/manta/tumoronly/meta.yml b/modules/manta/tumoronly/meta.yml index f902bc77..398d6843 100644 --- a/modules/manta/tumoronly/meta.yml +++ b/modules/manta/tumoronly/meta.yml @@ -31,14 +31,6 @@ input: type: file description: BAM/CRAM/SAM index file pattern: "*.{bai,crai,sai}" - - fasta: - type: file - description: Genome reference FASTA file - pattern: "*.{fa,fasta}" - - fai: - type: file - description: Genome reference FASTA index file - pattern: "*.{fa.fai,fasta.fai}" - target_bed: type: file description: BED file containing target regions for variant calling @@ -47,6 +39,14 @@ input: type: file description: Index for BED file containing target regions for variant calling pattern: "*.{bed.tbi}" + - fasta: + type: file + description: Genome reference FASTA file + pattern: "*.{fa,fasta}" + - fai: + type: file + description: Genome reference FASTA index file + pattern: "*.{fa.fai,fasta.fai}" output: - meta: diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index bad62629..0081c29f 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -7,30 +7,28 @@ include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' workflow test_manta_germline { input = [ [ id:'test'], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + [],[] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [[],[]] - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } workflow test_manta_germline_target_bed { input = [ [ id:'test'], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) ] fasta = 
file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [ - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), - ] - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } workflow test_manta_germline_target_bed_jointcalling { @@ -39,14 +37,12 @@ workflow test_manta_germline_target_bed_jointcalling { [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true)], [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),] + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),], + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [ - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), - ] - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } diff --git a/tests/modules/manta/somatic/main.nf b/tests/modules/manta/somatic/main.nf index 7da41bea..b32a273e 100644 --- a/tests/modules/manta/somatic/main.nf +++ b/tests/modules/manta/somatic/main.nf @@ -11,13 +11,30 @@ workflow test_manta_somatic { file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + [], [] ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) - bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) - MANTA_SOMATIC ( input, fasta, fai, bed, bed_tbi ) + MANTA_SOMATIC ( input, fasta, fai ) +} + +workflow test_manta_somatic_target_bed { + + input = [ + [ id:'test', single_end:false ], // meta map + 
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed_gz_tbi'], checkIfExists: true), + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + MANTA_SOMATIC ( input, fasta, fai ) } diff --git a/tests/modules/manta/tumoronly/main.nf b/tests/modules/manta/tumoronly/main.nf index be0d3dbb..dbe07914 100644 --- a/tests/modules/manta/tumoronly/main.nf +++ b/tests/modules/manta/tumoronly/main.nf @@ -8,28 +8,27 @@ workflow test_manta_tumoronly { input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + [], [] ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [] - bed_tbi = [] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) - MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi ) + MANTA_TUMORONLY ( input, fasta, fai ) } workflow test_manta_tumoronly_target_bed { input = [ [ id:'test'], // meta map file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed_gz_tbi'], checkIfExists: true) ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) - bed_tbi = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) - MANTA_TUMORONLY ( input, fasta, fai, bed, bed_tbi ) + MANTA_TUMORONLY ( input, fasta, fai ) } From f0800157544a82ae222931764483331a81812012 Mon Sep 
17 00:00:00 2001 From: Edmund Miller Date: Fri, 18 Mar 2022 08:27:50 -0500 Subject: [PATCH 044/283] Prettier (#1405) * style: Add prettier config files * build: Add prettier vscode extension * ci: Replace markdownlint and yamllint with prettier * style: Run prettier * style: Use indent of 2 for markdown as well https://github.com/nf-core/tools/pull/1470#issuecomment-1071028358 * style: Fix indent * style: Let editorconfig take over tab widths * style: yaml => yml * ci: Run prettier once Co-authored-by: Phil Ewels Co-authored-by: Phil Ewels --- .editorconfig | 2 +- .github/CONTRIBUTING.md | 5 +- .github/PULL_REQUEST_TEMPLATE.md | 6 +- .github/workflows/code-linting.yml | 32 ++-- .gitpod.yml | 18 +-- .markdownlint.yml | 11 -- .prettierignore | 16 ++ .prettierrc.yml | 1 + .yamllint.yml | 5 - README.md | 142 +++++++++--------- modules/abricate/run/meta.yml | 2 +- modules/abricate/summary/meta.yml | 2 +- modules/adapterremoval/meta.yml | 3 +- modules/amrfinderplus/run/meta.yml | 2 +- modules/amrfinderplus/update/meta.yml | 2 +- modules/ascat/meta.yml | 2 +- modules/bcftools/annotate/meta.yml | 2 +- modules/biscuit/pileup/meta.yml | 12 +- modules/cellranger/README.md | 21 ++- modules/cellranger/mkfastq/README.md | 24 ++- modules/cnvpytor/callcnvs/meta.yml | 3 +- modules/cnvpytor/histogram/meta.yml | 2 +- modules/cnvpytor/importreaddepth/meta.yml | 3 +- modules/cnvpytor/partition/meta.yml | 2 +- modules/controlfreec/meta.yml | 3 +- modules/csvtk/split/meta.yml | 3 +- modules/deeptools/bamcoverage/meta.yml | 2 +- modules/faqcs/meta.yml | 2 +- modules/gatk4/combinegvcfs/meta.yml | 5 +- modules/gatk4/fastqtosam/meta.yml | 3 +- modules/gatk4/gatherpileupsummaries/meta.yml | 2 +- modules/gatk4/getpileupsummaries/meta.yml | 1 - modules/gatk4/variantrecalibrator/meta.yml | 4 +- modules/hamronization/deeparg/meta.yml | 2 +- modules/hamronization/summarize/meta.yml | 2 +- modules/hmmer/hmmsearch/meta.yml | 2 +- modules/hpsuissero/meta.yml | 2 +- modules/ichorcna/createpon/meta.yml | 2 +- modules/ichorcna/run/meta.yml | 2 +- modules/legsta/meta.yml | 2 +- modules/macs2/callpeak/meta.yml | 3 +- modules/mafft/meta.yml | 2 +- modules/mobsuite/recon/meta.yml | 2 +- modules/msisensorpro/msi_somatic/meta.yml | 2 +- modules/msisensorpro/scan/meta.yml | 2 +- modules/ngscheckmate/ncm/meta.yml | 2 +- .../picard/addorreplacereadgroups/meta.yml | 2 +- .../picard/createsequencedictionary/meta.yml | 4 +- modules/picard/fixmateinformation/meta.yml | 2 +- modules/picard/sortvcf/meta.yml | 2 +- modules/plink2/score/meta.yml | 6 +- modules/rgi/main/meta.yml | 2 +- modules/rsem/calculateexpression/meta.yml | 2 +- modules/seqkit/pair/meta.yml | 2 +- modules/seqtk/rename/meta.yml | 2 +- modules/seqtk/seq/meta.yml | 2 +- modules/sistr/meta.yml | 2 +- modules/sourmash/sketch/meta.yml | 2 +- modules/ssuissero/meta.yml | 2 +- modules/stranger/meta.yml | 2 +- modules/yara/index/meta.yml | 5 +- modules/yara/mapper/meta.yml | 5 +- tests/modules/gatk4/combinegvcfs/test.yml | 2 +- tests/modules/gatk4/genotypegvcfs/test.yml | 10 +- tests/modules/hmmer/hmmsearch/test.yml | 8 +- tests/modules/ichorcna/createpon/test.yml | 4 +- tests/modules/ichorcna/run/test.yml | 4 +- .../picard/createsequencedictionary/test.yml | 2 +- tests/modules/roary/test.yml | 5 +- 69 files changed, 207 insertions(+), 244 deletions(-) delete mode 100644 .markdownlint.yml create mode 100644 .prettierignore create mode 100644 .prettierrc.yml delete mode 100644 .yamllint.yml diff --git a/.editorconfig b/.editorconfig index a6981b47..835c83dd 100644 --- a/.editorconfig 
+++ b/.editorconfig @@ -8,5 +8,5 @@ trim_trailing_whitespace = true indent_size = 4 indent_style = space -[*.{yml,yaml}] +[*.{md,yml,yaml}] indent_size = 2 diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 7faf4340..cba1dc95 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -16,7 +16,9 @@ Contributions to the code are even more welcome ;) If you'd like to write some code for nf-core/modules, the standard workflow is as follows: 1. Check that there isn't already an issue about your idea in the [nf-core/modules issues](https://github.com/nf-core/modules/issues) to avoid duplicating work - * If there isn't one already, please create one so that others know you're working on this + +- If there isn't one already, please create one so that others know you're working on this + 2. [Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) the [nf-core/modules repository](https://github.com/nf-core/modules) to your GitHub account 3. When adding a module file, follow the [guidelines](https://github.com/nf-core/modules#adding-a-new-module-file) 4. Ensure that [tests are working locally](https://github.com/nf-core/modules#running-tests-locally) @@ -40,7 +42,6 @@ These tests are run both with the latest available version of `Nextflow` and als For further information/help, please consult the [nf-core/modules README](https://github.com/nf-core/modules) and don't hesitate to get in touch on the nf-core Slack [#modules](https://nfcore.slack.com/channels/modules) channel ([join our Slack here](https://nf-co.re/join/slack)). - ### Images and figures For overview images and other documents we follow the nf-core [style guidelines and examples](https://nf-co.re/developers/design_guidelines). diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index cfe07f88..8fc9ae0d 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -27,6 +27,6 @@ Closes #XXX - [ ] Add a resource `label` - [ ] Use BioConda and BioContainers if possible to fulfil software requirements. - Ensure that the test works with either Docker / Singularity. Conda CI tests can be quite flaky: - - [ ] `PROFILE=docker pytest --tag --symlink --keep-workflow-wd --git-aware` - - [ ] `PROFILE=singularity pytest --tag --symlink --keep-workflow-wd --git-aware` - - [ ] `PROFILE=conda pytest --tag --symlink --keep-workflow-wd --git-aware` + - [ ] `PROFILE=docker pytest --tag --symlink --keep-workflow-wd --git-aware` + - [ ] `PROFILE=singularity pytest --tag --symlink --keep-workflow-wd --git-aware` + - [ ] `PROFILE=conda pytest --tag --symlink --keep-workflow-wd --git-aware` diff --git a/.github/workflows/code-linting.yml b/.github/workflows/code-linting.yml index 145dd5d9..9b066467 100644 --- a/.github/workflows/code-linting.yml +++ b/.github/workflows/code-linting.yml @@ -5,20 +5,21 @@ on: pull_request: branches: [master] - jobs: - Markdown: + Prettier: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - name: Check out repository + uses: actions/checkout@v2 - - uses: actions/setup-node@v2 + - name: Install NodeJS + uses: actions/setup-node@v2 - - name: Install markdownlint - run: npm install -g markdownlint-cli + - name: Install Prettier + run: npm install -g prettier - - name: Run Markdownlint - run: markdownlint ${GITHUB_WORKSPACE} -c ${GITHUB_WORKSPACE}/.markdownlint.yml + - name: Run Prettier --check + run: prettier --check . 
EditorConfig: runs-on: ubuntu-latest @@ -32,18 +33,3 @@ jobs: - name: Run ECLint check run: editorconfig-checker -exclude README.md $(git ls-files | grep -v test) - - YAML: - runs-on: ubuntu-latest - steps: - - name: Check out repository - uses: actions/checkout@v2 - - - name: Install NodeJS - uses: actions/setup-node@v2 - - - name: Install yaml-lint - run: npm install -g yaml-lint - - - name: Run yaml-lint - run: yamllint $(find ${GITHUB_WORKSPACE} -type f -name "*.yaml" -or -name "*.yml") diff --git a/.gitpod.yml b/.gitpod.yml index 25078360..d24968be 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -1,14 +1,14 @@ image: nfcore/gitpod:latest vscode: - extensions: # based on nf-core.nf-core-extensionpack - - codezombiech.gitignore # Language support for .gitignore files + extensions: # based on nf-core.nf-core-extensionpack + - codezombiech.gitignore # Language support for .gitignore files # - cssho.vscode-svgviewer # SVG viewer - - davidanson.vscode-markdownlint # Markdown/CommonMark linting and style checking for Visual Studio Code - - eamodio.gitlens # Quickly glimpse into whom, why, and when a line or code block was changed - - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files - - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar - - mechatroner.rainbow-csv # Highlight columns in csv files in different colors + - esbenp.prettier-vscode # Markdown/CommonMark linting and style checking for Visual Studio Code + - eamodio.gitlens # Quickly glimpse into whom, why, and when a line or code block was changed + - EditorConfig.EditorConfig # override user/workspace settings with settings found in .editorconfig files + - Gruntfuggly.todo-tree # Display TODO and FIXME in a tree view in the activity bar + - mechatroner.rainbow-csv # Highlight columns in csv files in different colors # - nextflow.nextflow # Nextflow syntax highlighting - - oderwat.indent-rainbow # Highlight indentation level - - streetsidesoftware.code-spell-checker # Spelling checker for source code + - oderwat.indent-rainbow # Highlight indentation level + - streetsidesoftware.code-spell-checker # Spelling checker for source code diff --git a/.markdownlint.yml b/.markdownlint.yml deleted file mode 100644 index 7890d0f2..00000000 --- a/.markdownlint.yml +++ /dev/null @@ -1,11 +0,0 @@ -# Markdownlint configuration file -default: true -line-length: false -no-multiple-blanks: 0 -blanks-around-headers: false -blanks-around-lists: false -header-increment: false -no-duplicate-header: - siblings_only: true -ul-indent: - indent: 4 diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 00000000..a76cd9ac --- /dev/null +++ b/.prettierignore @@ -0,0 +1,16 @@ +includes/Maven_Pro/ + +# gitignore +.nextflow* +work/ +results/ +test_output/ +output/ +.DS_Store +*.code-workspace +tests/data/ +.screenrc +.*.sw? +__pycache__ +*.pyo +*.pyc diff --git a/.prettierrc.yml b/.prettierrc.yml new file mode 100644 index 00000000..c81f9a76 --- /dev/null +++ b/.prettierrc.yml @@ -0,0 +1 @@ +printWidth: 120 diff --git a/.yamllint.yml b/.yamllint.yml deleted file mode 100644 index 6889fa34..00000000 --- a/.yamllint.yml +++ /dev/null @@ -1,5 +0,0 @@ -extends: default - -rules: - document-start: disable - line-length: disable diff --git a/README.md b/README.md index beee42e7..478fe8da 100644 --- a/README.md +++ b/README.md @@ -31,105 +31,105 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi 1. 
Install the latest version of [`nf-core/tools`](https://github.com/nf-core/tools#installation) (`>=2.0`) 2. List the available modules: - ```console - $ nf-core modules list remote + ```console + $ nf-core modules list remote - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.0 - INFO Modules available from nf-core/modules (master): pipeline_modules.py:164 + INFO Modules available from nf-core/modules (master): pipeline_modules.py:164 - ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ - ┃ Module Name ┃ - ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ - │ bandage/image │ - │ bcftools/consensus │ - │ bcftools/filter │ - │ bcftools/isec │ - ..truncated.. - ``` + ┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┓ + ┃ Module Name ┃ + ┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┩ + │ bandage/image │ + │ bcftools/consensus │ + │ bcftools/filter │ + │ bcftools/isec │ + ..truncated.. + ``` 3. Install the module in your pipeline directory: - ```console - $ nf-core modules install fastqc + ```console + $ nf-core modules install fastqc - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.0 - INFO Installing fastqc pipeline_modules.py:213 - INFO Downloaded 3 files to ./modules/nf-core/modules/fastqc pipeline_modules.py:236 - ``` + INFO Installing fastqc pipeline_modules.py:213 + INFO Downloaded 3 files to ./modules/nf-core/modules/fastqc pipeline_modules.py:236 + ``` 4. Import the module in your Nextflow script: - ```nextflow - #!/usr/bin/env nextflow + ```nextflow + #!/usr/bin/env nextflow - nextflow.enable.dsl = 2 + nextflow.enable.dsl = 2 - include { FASTQC } from './modules/nf-core/modules/fastqc/main' - ``` + include { FASTQC } from './modules/nf-core/modules/fastqc/main' + ``` 5. Remove the module from the pipeline repository if required: - ```console - $ nf-core modules remove fastqc + ```console + $ nf-core modules remove fastqc - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.0 - INFO Removing fastqc pipeline_modules.py:271 - INFO Successfully removed fastqc pipeline_modules.py:285 - ``` + INFO Removing fastqc pipeline_modules.py:271 + INFO Successfully removed fastqc pipeline_modules.py:285 + ``` 6. Check that a locally installed nf-core module is up-to-date compared to the one hosted in this repo: - ```console - $ nf-core modules lint fastqc + ```console + $ nf-core modules lint fastqc - ,--./,-. - ___ __ __ __ ___ /,-._.--~\ - |\ | |__ __ / ` / \ |__) |__ } { - | \| | \__, \__/ | \ |___ \`-._,-`-, - `._,._,' + ,--./,-. + ___ __ __ __ ___ /,-._.--~\ + |\ | |__ __ / ` / \ |__) |__ } { + | \| | \__, \__/ | \ |___ \`-._,-`-, + `._,._,' - nf-core/tools version 2.0 + nf-core/tools version 2.0 - INFO Linting pipeline: . lint.py:104 - INFO Linting module: fastqc lint.py:106 + INFO Linting pipeline: . 
lint.py:104 + INFO Linting module: fastqc lint.py:106 - ╭─────────────────────────────────────────────────────────────────────────────────╮ - │ [!] 1 Test Warning │ - ╰─────────────────────────────────────────────────────────────────────────────────╯ - ╭──────────────┬───────────────────────────────┬──────────────────────────────────╮ - │ Module name │ Test message │ File path │ - ├──────────────┼───────────────────────────────┼──────────────────────────────────┤ - │ fastqc │ Local copy of module outdated │ modules/nf-core/modules/fastqc/ │ - ╰──────────────┴────────────────────────────── ┴──────────────────────────────────╯ - ╭──────────────────────╮ - │ LINT RESULTS SUMMARY │ - ├──────────────────────┤ - │ [✔] 15 Tests Passed │ - │ [!] 1 Test Warning │ - │ [✗] 0 Test Failed │ - ╰──────────────────────╯ - ``` + ╭─────────────────────────────────────────────────────────────────────────────────╮ + │ [!] 1 Test Warning │ + ╰─────────────────────────────────────────────────────────────────────────────────╯ + ╭──────────────┬───────────────────────────────┬──────────────────────────────────╮ + │ Module name │ Test message │ File path │ + ├──────────────┼───────────────────────────────┼──────────────────────────────────┤ + │ fastqc │ Local copy of module outdated │ modules/nf-core/modules/fastqc/ │ + ╰──────────────┴────────────────────────────── ┴──────────────────────────────────╯ + ╭──────────────────────╮ + │ LINT RESULTS SUMMARY │ + ├──────────────────────┤ + │ [✔] 15 Tests Passed │ + │ [!] 1 Test Warning │ + │ [✗] 0 Test Failed │ + ╰──────────────────────╯ + ``` ## Adding new modules diff --git a/modules/abricate/run/meta.yml b/modules/abricate/run/meta.yml index 2464d03e..1365bcd7 100644 --- a/modules/abricate/run/meta.yml +++ b/modules/abricate/run/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/tseemann/abricate tool_dev_url: https://github.com/tseemann/abricate doi: "" - licence: ['GPL v2', 'GPL v2'] + licence: ["GPL v2", "GPL v2"] input: - meta: diff --git a/modules/abricate/summary/meta.yml b/modules/abricate/summary/meta.yml index b02ba930..c6653b80 100644 --- a/modules/abricate/summary/meta.yml +++ b/modules/abricate/summary/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/tseemann/abricate tool_dev_url: https://github.com/tseemann/abricate doi: "" - licence: ['GPL v2', 'GPL v2'] + licence: ["GPL v2", "GPL v2"] input: - meta: diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index e395fe4a..5faad043 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -26,8 +26,7 @@ input: pattern: "*.{fq,fastq,fq.gz,fastq.gz}" - adapterlist: type: file - description: - Optional text file containing list of adapters to look for for removal + description: Optional text file containing list of adapters to look for for removal with one adapter per line. Otherwise will look for default adapters (see AdapterRemoval man page), or can be modified to remove user-specified adapters via ext.args. 
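As a purely illustrative sketch of the `ext.args` route mentioned in the adapterremoval `meta.yml` above, a pipeline could supply its own adapter sequences from its `modules.config` instead of an adapterlist file; the process selector and the adapter sequences below are assumptions for the example and are not taken from this patch series.

```groovy
// Hypothetical modules.config snippet: the selector name and adapter sequences are illustrative only.
process {
    withName: 'ADAPTERREMOVAL' {
        // ext.args is appended to the AdapterRemoval command line by the module,
        // so custom adapters can be passed without providing an adapterlist file.
        ext.args = '--adapter1 AGATCGGAAGAGCACACGTCTGAACTCCAGTCA --adapter2 AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT'
    }
}
```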
diff --git a/modules/amrfinderplus/run/meta.yml b/modules/amrfinderplus/run/meta.yml index b0f3b8fa..813adf3e 100644 --- a/modules/amrfinderplus/run/meta.yml +++ b/modules/amrfinderplus/run/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/ncbi/amr/wiki tool_dev_url: https://github.com/ncbi/amr doi: "10.1038/s41598-021-91456-0" - licence: ['Public Domain'] + licence: ["Public Domain"] input: - meta: diff --git a/modules/amrfinderplus/update/meta.yml b/modules/amrfinderplus/update/meta.yml index 84fee3df..0e20a92f 100644 --- a/modules/amrfinderplus/update/meta.yml +++ b/modules/amrfinderplus/update/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/ncbi/amr/wiki tool_dev_url: https://github.com/ncbi/amr doi: "10.1038/s41598-021-91456-0" - licence: ['Public Domain'] + licence: ["Public Domain"] input: - input_not_required: diff --git a/modules/ascat/meta.yml b/modules/ascat/meta.yml index b44862a1..00d86069 100644 --- a/modules/ascat/meta.yml +++ b/modules/ascat/meta.yml @@ -9,7 +9,7 @@ tools: documentation: None tool_dev_url: https://github.com/Crick-CancerGenomics/ascat doi: "10.1093/bioinformatics/btaa538" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - args: diff --git a/modules/bcftools/annotate/meta.yml b/modules/bcftools/annotate/meta.yml index 3ed124d5..afe447c1 100644 --- a/modules/bcftools/annotate/meta.yml +++ b/modules/bcftools/annotate/meta.yml @@ -12,7 +12,7 @@ tools: homepage: http://samtools.github.io/bcftools/bcftools.html documentation: https://samtools.github.io/bcftools/bcftools.html#annotate doi: 10.1093/bioinformatics/btp352 - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/biscuit/pileup/meta.yml b/modules/biscuit/pileup/meta.yml index 399e3c2f..87c3891a 100644 --- a/modules/biscuit/pileup/meta.yml +++ b/modules/biscuit/pileup/meta.yml @@ -27,9 +27,9 @@ input: - normal_bams: type: file(s) description: | - BAM files to be analyzed. If no tumor_bam file is provided, any number of "normal" BAMs may be provided - ("normal" here is just a semantic issue, these BAMs could be from tumor or any other kind of tissue). If a - tumor BAM file is provided, exactly one normal (germline) BAM must be provided. + BAM files to be analyzed. If no tumor_bam file is provided, any number of "normal" BAMs may be provided + ("normal" here is just a semantic issue, these BAMs could be from tumor or any other kind of tissue). If a + tumor BAM file is provided, exactly one normal (germline) BAM must be provided. pattern: "*.{bam}" - normal_bais: type: file(s) @@ -38,9 +38,9 @@ input: - tumor_bam: type: file(s) description: | - Optional. If a tumor BAM file is provided, pileup will run in "somatic" mode and will annotate variants with - their somatic state (present in tumor only, present in normal only, present in both, etc). Note that if a - tumor BAM file is provided, exactly one normal BAM must be provided. + Optional. If a tumor BAM file is provided, pileup will run in "somatic" mode and will annotate variants with + their somatic state (present in tumor only, present in normal only, present in both, etc). Note that if a + tumor BAM file is provided, exactly one normal BAM must be provided. pattern: "*.{bam}" - tumor_bai: type: file(s) diff --git a/modules/cellranger/README.md b/modules/cellranger/README.md index d31735cb..312fd218 100644 --- a/modules/cellranger/README.md +++ b/modules/cellranger/README.md @@ -2,23 +2,22 @@ Cell Ranger is a commercial tool from 10X Genomics. 
The container provided for the cellranger nf-core module is not provided nor supported by 10x Genomics. Updating the Cell Ranger versions in the container and pushing the update to Dockerhub needs to be done manually. -1. Navigate to the appropriate download page. - - [Cell Ranger](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest): download the tar ball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. +1. Navigate to the appropriate download page. - [Cell Ranger](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest): download the tar ball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. 2. Edit the Dockerfile. Update the Cell Ranger versions in this line: - ```bash - ENV CELLRANGER_VER= - ``` + ```bash + ENV CELLRANGER_VER= + ``` 3. Create and test the container: - ```bash - docker build . -t nfcore/cellranger: - ``` + ```bash + docker build . -t nfcore/cellranger: + ``` 4. Access rights are needed to push the container to the Dockerhub nfcore organization, please ask a core team member to do so. - ```bash - docker push nfcore/cellranger: - ``` + ```bash + docker push nfcore/cellranger: + ``` diff --git a/modules/cellranger/mkfastq/README.md b/modules/cellranger/mkfastq/README.md index 07c3919b..e281ded3 100644 --- a/modules/cellranger/mkfastq/README.md +++ b/modules/cellranger/mkfastq/README.md @@ -2,25 +2,23 @@ Bcl2fastq2 and Cell Ranger are commercial tools from Illumina and 10X Genomics, respectively. The container provided for the cellranger nf-core module is not provided nor supported by either Illumina or 10x Genomics. Updating the bcl2fastq2 or Cell Ranger versions in the container and pushing the update to Dockerhub needs to be done manually. -1. Navigate to the appropriate download pages. - - [bcl2fastq2](https://emea.support.illumina.com/sequencing/sequencing_software/bcl2fastq-conversion-software.html): download the linux rpm installer of the desired bcl2fastq2 version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. - - [Cell Ranger](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest): download the tar ball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. +1. Navigate to the appropriate download pages. - [bcl2fastq2](https://emea.support.illumina.com/sequencing/sequencing_software/bcl2fastq-conversion-software.html): download the linux rpm installer of the desired bcl2fastq2 version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. - [Cell Ranger](https://support.10xgenomics.com/single-cell-gene-expression/software/downloads/latest): download the tar ball of the desired Cell Ranger version with `curl` or `wget`. Place this file in the same folder where the Dockerfile lies. 2. Edit the Dockerfile. Update the bcl2fastq2 and Cell Ranger versions in this line: - ```bash - ENV BCL2FASTQ2_VER= \ - CELLRANGER_VER= - ``` + ```bash + ENV BCL2FASTQ2_VER= \ + CELLRANGER_VER= + ``` 3. Create and test the container: - ```bash - docker build . -t nfcore/cellrangermkfastq: - ``` + ```bash + docker build . -t nfcore/cellrangermkfastq: + ``` 4. Access rights are needed to push the container to the Dockerhub nfcore organization, please ask a core team member to do so. 
- ```bash - docker push nfcore/cellrangermkfastq: - ``` + ```bash + docker push nfcore/cellrangermkfastq: + ``` diff --git a/modules/cnvpytor/callcnvs/meta.yml b/modules/cnvpytor/callcnvs/meta.yml index c153b949..edfc462a 100644 --- a/modules/cnvpytor/callcnvs/meta.yml +++ b/modules/cnvpytor/callcnvs/meta.yml @@ -8,7 +8,7 @@ tools: documentation: https://github.com/abyzovlab/CNVpytor tool_dev_url: https://github.com/abyzovlab/CNVpytor doi: "10.1101/2021.01.27.428472v1" - licence: ['MIT'] + licence: ["MIT"] input: - meta: type: map @@ -35,6 +35,5 @@ output: description: File containing software versions pattern: "versions.yml" - authors: - "@sima-r" diff --git a/modules/cnvpytor/histogram/meta.yml b/modules/cnvpytor/histogram/meta.yml index 8484ddd4..fcad2221 100644 --- a/modules/cnvpytor/histogram/meta.yml +++ b/modules/cnvpytor/histogram/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/abyzovlab/CNVpytor tool_dev_url: https://github.com/abyzovlab/CNVpytor doi: "10.1101/2021.01.27.428472v1" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/cnvpytor/importreaddepth/meta.yml b/modules/cnvpytor/importreaddepth/meta.yml index 908c3a74..1cf3c0d0 100644 --- a/modules/cnvpytor/importreaddepth/meta.yml +++ b/modules/cnvpytor/importreaddepth/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/abyzovlab/CNVpytor tool_dev_url: https://github.com/abyzovlab/CNVpytor doi: "10.1101/2021.01.27.428472v1" - licence: ['MIT'] + licence: ["MIT"] input: - meta: @@ -35,7 +35,6 @@ input: description: Index of reference fasta file pattern: "*.fai" - output: - meta: type: map diff --git a/modules/cnvpytor/partition/meta.yml b/modules/cnvpytor/partition/meta.yml index 3f0a3e21..a72cea4c 100644 --- a/modules/cnvpytor/partition/meta.yml +++ b/modules/cnvpytor/partition/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/abyzovlab/CNVpytor tool_dev_url: https://github.com/abyzovlab/CNVpytor doi: "10.1101/2021.01.27.428472v1" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/controlfreec/meta.yml b/modules/controlfreec/meta.yml index 3e218037..b2a6772b 100644 --- a/modules/controlfreec/meta.yml +++ b/modules/controlfreec/meta.yml @@ -13,7 +13,7 @@ tools: documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html tool_dev_url: https://github.com/BoevaLab/FREEC/ doi: "10.1093/bioinformatics/btq635" - licence: ['GPL >=2'] + licence: ["GPL >=2"] input: - args: @@ -131,7 +131,6 @@ input: description: Sorted bed file containing capture regions (optional) pattern: "*.bed" - output: - meta: type: map diff --git a/modules/csvtk/split/meta.yml b/modules/csvtk/split/meta.yml index 334cc6ac..64373633 100644 --- a/modules/csvtk/split/meta.yml +++ b/modules/csvtk/split/meta.yml @@ -6,8 +6,7 @@ keywords: - tsv tools: - csvtk: - description: - CSVTK is a cross-platform, efficient and practical CSV/TSV toolkit + description: CSVTK is a cross-platform, efficient and practical CSV/TSV toolkit that allows rapid data investigation and manipulation. 
homepage: https://bioinf.shenwei.me/csvtk/ documentation: https://bioinf.shenwei.me/csvtk/ diff --git a/modules/deeptools/bamcoverage/meta.yml b/modules/deeptools/bamcoverage/meta.yml index d0590b2a..fb92168f 100644 --- a/modules/deeptools/bamcoverage/meta.yml +++ b/modules/deeptools/bamcoverage/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://deeptools.readthedocs.io/en/develop/content/tools/bamCoverage.html tool_dev_url: https://github.com/deeptools/deepTools/ doi: "https://doi.org/10.1093/nar/gkw257" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - meta: diff --git a/modules/faqcs/meta.yml b/modules/faqcs/meta.yml index 1161a13d..df3c2308 100644 --- a/modules/faqcs/meta.yml +++ b/modules/faqcs/meta.yml @@ -13,7 +13,7 @@ tools: documentation: https://github.com/LANL-Bioinformatics/FaQCs tool_dev_url: https://github.com/LANL-Bioinformatics/FaQCs doi: "https://doi.org/10.1186/s12859-014-0366-2" - licence: ['GPLv3 License'] + licence: ["GPLv3 License"] ## TODO nf-core: Add a description of all of the variables used as input input: diff --git a/modules/gatk4/combinegvcfs/meta.yml b/modules/gatk4/combinegvcfs/meta.yml index 2e0198dd..b891de90 100644 --- a/modules/gatk4/combinegvcfs/meta.yml +++ b/modules/gatk4/combinegvcfs/meta.yml @@ -8,14 +8,15 @@ keywords: - Short_Variant_Discovery tools: - gatk4: - description: Genome Analysis Toolkit (GATK4). Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + description: + Genome Analysis Toolkit (GATK4). Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools with a primary focus on variant discovery and genotyping. Its powerful processing engine and high-performance computing features make it capable of taking on projects of any size. homepage: https://gatk.broadinstitute.org/hc/en-us documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037593911-CombineGVCFs tool_dev_url: https://github.com/broadinstitute/gatk doi: 10.1158/1538-7445.AM2017-3590 - licence: ['Apache-2.0'] + licence: ["Apache-2.0"] input: - fasta: diff --git a/modules/gatk4/fastqtosam/meta.yml b/modules/gatk4/fastqtosam/meta.yml index 0b173274..59e305b8 100644 --- a/modules/gatk4/fastqtosam/meta.yml +++ b/modules/gatk4/fastqtosam/meta.yml @@ -24,8 +24,7 @@ input: e.g. [ id:'test', single_end:false ] - reads: type: file - description: - List of input FastQ files of size 1 and 2 for single-end and paired-end data, + description: List of input FastQ files of size 1 and 2 for single-end and paired-end data, respectively. pattern: "*.fastq.gz" diff --git a/modules/gatk4/gatherpileupsummaries/meta.yml b/modules/gatk4/gatherpileupsummaries/meta.yml index 7885a930..2dc92d55 100644 --- a/modules/gatk4/gatherpileupsummaries/meta.yml +++ b/modules/gatk4/gatherpileupsummaries/meta.yml @@ -9,7 +9,7 @@ tools: documentation: https://gatk.broadinstitute.org/hc/en-us tool_dev_url: https://github.com/broadinstitute/gatk doi: "10.1158/1538-7445.AM2017-3590" - licence: ['BSD-3-clause'] + licence: ["BSD-3-clause"] input: - meta: diff --git a/modules/gatk4/getpileupsummaries/meta.yml b/modules/gatk4/getpileupsummaries/meta.yml index a70cf1e5..3a940dea 100644 --- a/modules/gatk4/getpileupsummaries/meta.yml +++ b/modules/gatk4/getpileupsummaries/meta.yml @@ -56,7 +56,6 @@ input: description: Index file for the germline resource. 
pattern: "*.vcf.gz.tbi" - output: - pileup: type: file diff --git a/modules/gatk4/variantrecalibrator/meta.yml b/modules/gatk4/variantrecalibrator/meta.yml index 92416a58..afe33d7a 100644 --- a/modules/gatk4/variantrecalibrator/meta.yml +++ b/modules/gatk4/variantrecalibrator/meta.yml @@ -52,11 +52,11 @@ input: - resvcfs: type: list description: resource files to be used as truth, training and known sites resources, this imports the files into the module, file names are specified again in the resource_labels to be called via the command. - pattern: '*/hapmap_3.3.hg38_chr21.vcf.gz' + pattern: "*/hapmap_3.3.hg38_chr21.vcf.gz" - restbis: type: list description: tbis for the corresponding vcfs files to be used as truth, training and known resources. - pattern: '*/hapmap_3.3.hg38_chr21.vcf.gz.tbi' + pattern: "*/hapmap_3.3.hg38_chr21.vcf.gz.tbi" - reslabels: type: list description: labels for the resource files to be used as truth, training and known sites resources, label should include an identifier,which kind of resource(s) it is, prior value and name of the file. diff --git a/modules/hamronization/deeparg/meta.yml b/modules/hamronization/deeparg/meta.yml index 0747700e..dc076d06 100644 --- a/modules/hamronization/deeparg/meta.yml +++ b/modules/hamronization/deeparg/meta.yml @@ -12,7 +12,7 @@ tools: documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md tool_dev_url: https://github.com/pha4ge/hAMRonization doi: "" - licence: ['GNU Lesser General Public v3 (LGPL v3)'] + licence: ["GNU Lesser General Public v3 (LGPL v3)"] input: - meta: diff --git a/modules/hamronization/summarize/meta.yml b/modules/hamronization/summarize/meta.yml index 7665c2c5..dabcd092 100644 --- a/modules/hamronization/summarize/meta.yml +++ b/modules/hamronization/summarize/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/pha4ge/hAMRonization/blob/master/README.md tool_dev_url: https://github.com/pha4ge/hAMRonization doi: "" - licence: ['GNU Lesser General Public v3 (LGPL v3)'] + licence: ["GNU Lesser General Public v3 (LGPL v3)"] input: - reports: diff --git a/modules/hmmer/hmmsearch/meta.yml b/modules/hmmer/hmmsearch/meta.yml index b315d668..3f4459ba 100644 --- a/modules/hmmer/hmmsearch/meta.yml +++ b/modules/hmmer/hmmsearch/meta.yml @@ -12,7 +12,7 @@ tools: documentation: http://hmmer.org/documentation.html tool_dev_url: https://github.com/EddyRivasLab/hmmer doi: "10.1371/journal.pcbi.1002195" - licence: ['BSD'] + licence: ["BSD"] input: - meta: diff --git a/modules/hpsuissero/meta.yml b/modules/hpsuissero/meta.yml index 2f48c6c3..55b5de39 100644 --- a/modules/hpsuissero/meta.yml +++ b/modules/hpsuissero/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/jimmyliu1326/HpsuisSero tool_dev_url: https://github.com/jimmyliu1326/HpsuisSero doi: "" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/ichorcna/createpon/meta.yml b/modules/ichorcna/createpon/meta.yml index ce1eca0a..77b99915 100644 --- a/modules/ichorcna/createpon/meta.yml +++ b/modules/ichorcna/createpon/meta.yml @@ -14,7 +14,7 @@ tools: documentation: https://github.com/broadinstitute/ichorCNA/wiki tool_dev_url: https://github.com/broadinstitute/ichorCNA doi: "10.1038/s41467-017-00965-y" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - wigs: diff --git a/modules/ichorcna/run/meta.yml b/modules/ichorcna/run/meta.yml index f0febddf..9b487997 100644 --- a/modules/ichorcna/run/meta.yml +++ b/modules/ichorcna/run/meta.yml @@ -13,7 +13,7 @@ tools: documentation: 
https://github.com/broadinstitute/ichorCNA/wiki tool_dev_url: https://github.com/broadinstitute/ichorCNA doi: "10.1038/s41467-017-00965-y" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - meta: diff --git a/modules/legsta/meta.yml b/modules/legsta/meta.yml index 24013c67..e9ca3b43 100644 --- a/modules/legsta/meta.yml +++ b/modules/legsta/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/tseemann/legsta tool_dev_url: https://github.com/tseemann/legsta doi: "" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - meta: diff --git a/modules/macs2/callpeak/meta.yml b/modules/macs2/callpeak/meta.yml index 974ea33a..982bc5b2 100644 --- a/modules/macs2/callpeak/meta.yml +++ b/modules/macs2/callpeak/meta.yml @@ -28,8 +28,7 @@ input: description: The control file - macs2_gsize: type: string - description: - Effective genome size. It can be 1.0e+9 or 1000000000, or shortcuts:'hs' for human (2.7e9), + description: Effective genome size. It can be 1.0e+9 or 1000000000, or shortcuts:'hs' for human (2.7e9), 'mm' for mouse (1.87e9), 'ce' for C. elegans (9e7) and 'dm' for fruitfly (1.2e8) output: diff --git a/modules/mafft/meta.yml b/modules/mafft/meta.yml index 10c7f0c2..66bb10b9 100644 --- a/modules/mafft/meta.yml +++ b/modules/mafft/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://mafft.cbrc.jp/alignment/software/manual/manual.html tool_dev_url: https://mafft.cbrc.jp/alignment/software/source.html doi: "10.1093/nar/gkf436" - licence: ['BSD'] + licence: ["BSD"] input: - meta: diff --git a/modules/mobsuite/recon/meta.yml b/modules/mobsuite/recon/meta.yml index b5232142..ef77e1e0 100644 --- a/modules/mobsuite/recon/meta.yml +++ b/modules/mobsuite/recon/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://github.com/phac-nml/mob-suite tool_dev_url: https://github.com/phac-nml/mob-suite doi: "10.1099/mgen.0.000435" - licence: ['Apache License, Version 2.0'] + licence: ["Apache License, Version 2.0"] input: - meta: diff --git a/modules/msisensorpro/msi_somatic/meta.yml b/modules/msisensorpro/msi_somatic/meta.yml index 09bc0e73..7caa1c1a 100644 --- a/modules/msisensorpro/msi_somatic/meta.yml +++ b/modules/msisensorpro/msi_somatic/meta.yml @@ -12,7 +12,7 @@ tools: documentation: https://github.com/xjtu-omics/msisensor-pro/wiki tool_dev_url: https://github.com/xjtu-omics/msisensor-pro doi: "doi.org/10.1016/j.gpb.2020.02.001" - licence: ['Custom Licence'] + licence: ["Custom Licence"] input: - meta: diff --git a/modules/msisensorpro/scan/meta.yml b/modules/msisensorpro/scan/meta.yml index 72c1b84b..27f5bc0f 100644 --- a/modules/msisensorpro/scan/meta.yml +++ b/modules/msisensorpro/scan/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/xjtu-omics/msisensor-pro/wiki tool_dev_url: https://github.com/xjtu-omics/msisensor-pro doi: "doi.org/10.1016/j.gpb.2020.02.001" - licence: ['Custom Licence'] + licence: ["Custom Licence"] input: - meta: diff --git a/modules/ngscheckmate/ncm/meta.yml b/modules/ngscheckmate/ncm/meta.yml index b8837b80..7f91f387 100644 --- a/modules/ngscheckmate/ncm/meta.yml +++ b/modules/ngscheckmate/ncm/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/parklab/NGSCheckMate tool_dev_url: https://github.com/parklab/NGSCheckMate doi: "doi:/10.1093/nar/gkx193" - licence: ['MIT'] + licence: ["MIT"] input: - files: diff --git a/modules/picard/addorreplacereadgroups/meta.yml b/modules/picard/addorreplacereadgroups/meta.yml index bdb5725c..e013bf4b 100644 --- a/modules/picard/addorreplacereadgroups/meta.yml +++ 
b/modules/picard/addorreplacereadgroups/meta.yml @@ -12,7 +12,7 @@ tools: homepage: https://broadinstitute.github.io/picard/ documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037226472-AddOrReplaceReadGroups-Picard- tool_dev_url: https://github.com/broadinstitute/picard - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/picard/createsequencedictionary/meta.yml b/modules/picard/createsequencedictionary/meta.yml index f40a4dbc..9510c590 100644 --- a/modules/picard/createsequencedictionary/meta.yml +++ b/modules/picard/createsequencedictionary/meta.yml @@ -7,11 +7,11 @@ keywords: tools: - picard: description: | - Creates a sequence dictionary file (with ".dict" extension) from a reference sequence provided in FASTA format, which is required by many processing and analysis tools. The output file contains a header but no SAMRecords, and the header contains only sequence records. + Creates a sequence dictionary file (with ".dict" extension) from a reference sequence provided in FASTA format, which is required by many processing and analysis tools. The output file contains a header but no SAMRecords, and the header contains only sequence records. homepage: https://broadinstitute.github.io/picard/ documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360036712531-CreateSequenceDictionary-Picard- tool_dev_url: https://github.com/broadinstitute/picard - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/picard/fixmateinformation/meta.yml b/modules/picard/fixmateinformation/meta.yml index c01d803c..67dedcfa 100644 --- a/modules/picard/fixmateinformation/meta.yml +++ b/modules/picard/fixmateinformation/meta.yml @@ -10,7 +10,7 @@ tools: homepage: https://broadinstitute.github.io/picard/ documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360036713471-FixMateInformation-Picard- tool_dev_url: https://github.com/broadinstitute/picard - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/picard/sortvcf/meta.yml b/modules/picard/sortvcf/meta.yml index a2b46d5a..f75d3401 100644 --- a/modules/picard/sortvcf/meta.yml +++ b/modules/picard/sortvcf/meta.yml @@ -8,7 +8,7 @@ tools: description: Java tools for working with NGS data in the BAM/CRAM/SAM and VCF format homepage: https://broadinstitute.github.io/picard/ documentation: https://broadinstitute.github.io/picard/command-line-overview.html#SortVcf - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/plink2/score/meta.yml b/modules/plink2/score/meta.yml index 5dad6259..be5fdbee 100644 --- a/modules/plink2/score/meta.yml +++ b/modules/plink2/score/meta.yml @@ -6,13 +6,13 @@ keywords: tools: - plink2: description: | - Whole genome association analysis toolset, designed to perform a range - of basic, large-scale analyses in a computationally efficient manner + Whole genome association analysis toolset, designed to perform a range + of basic, large-scale analyses in a computationally efficient manner homepage: http://www.cog-genomics.org/plink/2.0/ documentation: http://www.cog-genomics.org/plink/2.0/general_usage tool_dev_url: None doi: "10.1186/s13742-015-0047-8" - licence: ['GPL v3'] + licence: ["GPL v3"] input: - meta: diff --git a/modules/rgi/main/meta.yml b/modules/rgi/main/meta.yml index cd97ff92..1f0cb10d 100644 --- a/modules/rgi/main/meta.yml +++ b/modules/rgi/main/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/arpcard/rgi tool_dev_url: https://github.com/arpcard/rgi doi: "10.1093/nar/gkz935" - licence: 
['https://card.mcmaster.ca/about'] + licence: ["https://card.mcmaster.ca/about"] input: - meta: diff --git a/modules/rsem/calculateexpression/meta.yml b/modules/rsem/calculateexpression/meta.yml index 10b54b49..8b89c7d1 100644 --- a/modules/rsem/calculateexpression/meta.yml +++ b/modules/rsem/calculateexpression/meta.yml @@ -11,7 +11,7 @@ tools: homepage: https://github.com/deweylab/RSEM documentation: https://github.com/deweylab/RSEM doi: https://doi.org/10.1186/1471-2105-12-323 - licence: ['GPL-3.0-or-later'] + licence: ["GPL-3.0-or-later"] input: - meta: type: map diff --git a/modules/seqkit/pair/meta.yml b/modules/seqkit/pair/meta.yml index 3b35d908..955cfbf2 100644 --- a/modules/seqkit/pair/meta.yml +++ b/modules/seqkit/pair/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://bioinf.shenwei.me/seqkit/usage/ tool_dev_url: https://github.com/shenwei356/seqkit/ doi: "10.1371/journal.pone.0163962" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/seqtk/rename/meta.yml b/modules/seqtk/rename/meta.yml index b68dec8e..7672983c 100644 --- a/modules/seqtk/rename/meta.yml +++ b/modules/seqtk/rename/meta.yml @@ -8,7 +8,7 @@ tools: homepage: https://github.com/lh3/seqtk documentation: https://docs.csc.fi/apps/seqtk/ tool_dev_url: https://github.com/lh3/seqtk - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/seqtk/seq/meta.yml b/modules/seqtk/seq/meta.yml index ac32162b..3986257a 100644 --- a/modules/seqtk/seq/meta.yml +++ b/modules/seqtk/seq/meta.yml @@ -8,7 +8,7 @@ tools: homepage: https://github.com/lh3/seqtk documentation: https://docs.csc.fi/apps/seqtk/ tool_dev_url: https://github.com/lh3/seqtk - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/sistr/meta.yml b/modules/sistr/meta.yml index 5ce43334..db8399f6 100644 --- a/modules/sistr/meta.yml +++ b/modules/sistr/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/phac-nml/sistr_cmd tool_dev_url: https://github.com/phac-nml/sistr_cmd doi: "10.1371/journal.pone.0147101" - licence: ['Apache-2.0'] + licence: ["Apache-2.0"] input: - meta: diff --git a/modules/sourmash/sketch/meta.yml b/modules/sourmash/sketch/meta.yml index 9fb552bc..083774b5 100644 --- a/modules/sourmash/sketch/meta.yml +++ b/modules/sourmash/sketch/meta.yml @@ -10,7 +10,7 @@ tools: documentation: https://sourmash.readthedocs.io/ tool_dev_url: https://github.com/dib-lab/sourmash doi: "10.1186/s13059-016-0997-x" - licence: ['BSD-3-clause'] + licence: ["BSD-3-clause"] input: - meta: diff --git a/modules/ssuissero/meta.yml b/modules/ssuissero/meta.yml index 2c0031e6..8418e049 100644 --- a/modules/ssuissero/meta.yml +++ b/modules/ssuissero/meta.yml @@ -11,7 +11,7 @@ tools: documentation: https://github.com/jimmyliu1326/SsuisSero tool_dev_url: https://github.com/jimmyliu1326/SsuisSero doi: "" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/stranger/meta.yml b/modules/stranger/meta.yml index a9a280ad..61ebc7a9 100644 --- a/modules/stranger/meta.yml +++ b/modules/stranger/meta.yml @@ -12,7 +12,7 @@ tools: documentation: https://github.com/moonso/stranger tool_dev_url: https://github.com/moonso/stranger doi: "10.5281/zenodo.4548873" - licence: ['MIT'] + licence: ["MIT"] input: - meta: diff --git a/modules/yara/index/meta.yml b/modules/yara/index/meta.yml index bdd6bf9a..bfc74f0a 100644 --- a/modules/yara/index/meta.yml +++ b/modules/yara/index/meta.yml @@ -13,10 +13,7 @@ tools: documentation: https://github.com/seqan/seqan tool_dev_url: https://github.com/seqan/seqan 
doi: "" - licence: - [ - "https://raw.githubusercontent.com/seqan/seqan/develop/apps/yara/LICENSE", - ] + licence: ["https://raw.githubusercontent.com/seqan/seqan/develop/apps/yara/LICENSE"] input: - fasta: diff --git a/modules/yara/mapper/meta.yml b/modules/yara/mapper/meta.yml index 188e1d52..0c4c0d43 100644 --- a/modules/yara/mapper/meta.yml +++ b/modules/yara/mapper/meta.yml @@ -11,10 +11,7 @@ tools: documentation: https://github.com/seqan/seqan tool_dev_url: https://github.com/seqan/seqan doi: "" - licence: - [ - "https://raw.githubusercontent.com/seqan/seqan/develop/apps/yara/LICENSE", - ] + licence: ["https://raw.githubusercontent.com/seqan/seqan/develop/apps/yara/LICENSE"] input: - meta: diff --git a/tests/modules/gatk4/combinegvcfs/test.yml b/tests/modules/gatk4/combinegvcfs/test.yml index 72d8a6a5..54948668 100644 --- a/tests/modules/gatk4/combinegvcfs/test.yml +++ b/tests/modules/gatk4/combinegvcfs/test.yml @@ -5,6 +5,6 @@ - gatk4/combinegvcfs files: - path: output/gatk4/test.combined.g.vcf.gz - contains: ['VCFv4.2'] + contains: ["VCFv4.2"] - path: output/gatk4/versions.yml md5sum: 49d9c467f84b6a99a4da3ef161af26bd diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml index a49673fd..dff79460 100644 --- a/tests/modules/gatk4/genotypegvcfs/test.yml +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -57,10 +57,7 @@ - gatk4/genotypegvcfs files: - path: output/gatk4/test.genotyped.vcf.gz - contains: - [ - "AC=2;AF=1.00;AN=2;DB;DP=20;ExcessHet=0.0000;FS=0.000;MLEAC=2;MLEAF=1.00;MQ=60.00;QD=24.05;SOR=0.693", - ] + contains: ["AC=2;AF=1.00;AN=2;DB;DP=20;ExcessHet=0.0000;FS=0.000;MLEAC=2;MLEAF=1.00;MQ=60.00;QD=24.05;SOR=0.693"] - path: output/gatk4/test.genotyped.vcf.gz.tbi - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input @@ -109,8 +106,5 @@ - gatk4/genotypegvcfs files: - path: output/gatk4/test.genotyped.vcf.gz - contains: - [ - "AC=2;AF=1.00;AN=2;DP=2;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;QD=18.66;SOR=0.693", - ] + contains: ["AC=2;AF=1.00;AN=2;DP=2;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;QD=18.66;SOR=0.693"] - path: output/gatk4/test.genotyped.vcf.gz.tbi diff --git a/tests/modules/hmmer/hmmsearch/test.yml b/tests/modules/hmmer/hmmsearch/test.yml index 91e4775c..dd3e16ef 100644 --- a/tests/modules/hmmer/hmmsearch/test.yml +++ b/tests/modules/hmmer/hmmsearch/test.yml @@ -6,7 +6,7 @@ files: - path: output/hmmer/test.txt.gz contains: - - '[ok]' + - "[ok]" - path: output/hmmer/versions.yml md5sum: ed0808c10abd205c6bd0fb01f45259bb @@ -20,12 +20,12 @@ md5sum: d3121aa33455074c566fb7f8fdcda7b0 - path: output/hmmer/test.domtbl.gz contains: - - '# [ok]' + - "# [ok]" - path: output/hmmer/test.tbl.gz contains: - - '# [ok]' + - "# [ok]" - path: output/hmmer/test.txt.gz contains: - - '[ok]' + - "[ok]" - path: output/hmmer/versions.yml md5sum: ebdcb08ae540e840f7b5c4c75a3a2993 diff --git a/tests/modules/ichorcna/createpon/test.yml b/tests/modules/ichorcna/createpon/test.yml index 53422b78..c8fe63c8 100644 --- a/tests/modules/ichorcna/createpon/test.yml +++ b/tests/modules/ichorcna/createpon/test.yml @@ -5,7 +5,7 @@ - ichorcna files: - path: output/ichorcna/PoN_median.txt - contains: ['seqnames'] + contains: ["seqnames"] - path: output/ichorcna/versions.yml md5sum: 59a2121301113cc013bfae65935e07f1 @@ -16,6 +16,6 @@ - ichorcna files: - path: output/ichorcna/PoN_median.txt - contains: ['seqnames'] + contains: ["seqnames"] - path: output/ichorcna/versions.yml md5sum: 31a5fcc0075dbe747f7736efbdb99644 diff --git 
a/tests/modules/ichorcna/run/test.yml b/tests/modules/ichorcna/run/test.yml index af78e4b3..4be83774 100644 --- a/tests/modules/ichorcna/run/test.yml +++ b/tests/modules/ichorcna/run/test.yml @@ -5,7 +5,7 @@ - ichorcna/run files: - path: output/ichorcna/test.cna.seg - contains: ['Corrected_Copy_Number'] + contains: ["Corrected_Copy_Number"] - path: output/ichorcna/test.params.txt md5sum: e39a579cdcc9576679f06dc5c22605a7 - path: output/ichorcna/versions.yml @@ -18,7 +18,7 @@ - ichorcna/run files: - path: output/ichorcna/test.cna.seg - contains: ['Corrected_Copy_Number'] + contains: ["Corrected_Copy_Number"] - path: output/ichorcna/test.params.txt md5sum: 0b97e0269cd0b571f5a85890f6ddb181 - path: output/ichorcna/versions.yml diff --git a/tests/modules/picard/createsequencedictionary/test.yml b/tests/modules/picard/createsequencedictionary/test.yml index 3c9d0e7e..2a43be41 100644 --- a/tests/modules/picard/createsequencedictionary/test.yml +++ b/tests/modules/picard/createsequencedictionary/test.yml @@ -5,6 +5,6 @@ - picard files: - path: output/picard/test.dict - contains: ['SN:MT192765.1'] + contains: ["SN:MT192765.1"] - path: output/picard/versions.yml md5sum: b3d8c7ea65b8a6d3237b153d13fe2014 diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml index 11bdf2c7..d844a430 100644 --- a/tests/modules/roary/test.yml +++ b/tests/modules/roary/test.yml @@ -26,10 +26,7 @@ - path: output/roary/results/gene_presence_absence.Rtab contains: ["Gene"] - path: output/roary/results/gene_presence_absence.csv - contains: - [ - '"Gene","Non-unique Gene name","Annotation","No. isolates","No. sequences"', - ] + contains: ['"Gene","Non-unique Gene name","Annotation","No. isolates","No. sequences"'] - path: output/roary/results/number_of_conserved_genes.Rtab contains: ["279"] - path: output/roary/results/number_of_genes_in_pan_genome.Rtab From ea41a8a6f761b9993d857570e872abaae3fea555 Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Fri, 18 Mar 2022 15:39:27 +0100 Subject: [PATCH 045/283] Add centrifuge module (#1410) * Add centrifuge module * Add centrifuge module * Add centrifuge module * Add centrifuge module --- modules/centrifuge/main.nf | 63 ++++++++++++++++++++ modules/centrifuge/meta.yml | 73 ++++++++++++++++++++++++ tests/config/pytest_modules.yml | 12 ++-- tests/modules/centrifuge/main.nf | 33 +++++++++++ tests/modules/centrifuge/nextflow.config | 5 ++ tests/modules/centrifuge/test.yml | 22 +++++++ 6 files changed, 204 insertions(+), 4 deletions(-) create mode 100644 modules/centrifuge/main.nf create mode 100644 modules/centrifuge/meta.yml create mode 100644 tests/modules/centrifuge/main.nf create mode 100644 tests/modules/centrifuge/nextflow.config create mode 100644 tests/modules/centrifuge/test.yml diff --git a/modules/centrifuge/main.nf b/modules/centrifuge/main.nf new file mode 100644 index 00000000..7eb566da --- /dev/null +++ b/modules/centrifuge/main.nf @@ -0,0 +1,63 @@ +process CENTRIFUGE { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::centrifuge=1.0.4_beta" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/centrifuge:1.0.4_beta--h9a82719_6' : + 'quay.io/biocontainers/centrifuge:1.0.4_beta--h9a82719_6' }" + + input: + tuple val(meta), path(reads) + path db + val save_unaligned + val save_aligned + val sam_format + + output: + tuple val(meta), path('*report.txt') , emit: report + tuple val(meta), path('*results.txt') , emit: results + tuple val(meta), path('*kreport.txt') , emit: kreport + tuple val(meta), path('*.sam') , optional: true, emit: sam + tuple val(meta), path('*.mapped.fastq{,.1,.2}.gz') , optional: true, emit: fastq_mapped + tuple val(meta), path('*.unmapped.fastq{,.1,.2}.gz') , optional: true, emit: fastq_unmapped + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def paired = meta.single_end ? "-U ${reads}" : "-1 ${reads[0]} -2 ${reads[1]}" + def db_name = db.toString().replace(".tar.gz","") + def unaligned = '' + def aligned = '' + if (meta.single_end) { + unaligned = save_unaligned ? "--un-gz ${prefix}.unmapped.fastq.gz" : '' + aligned = save_aligned ? "--al-gz ${prefix}.mapped.fastq.gz" : '' + } else { + unaligned = save_unaligned ? "--un-conc-gz ${prefix}.unmapped.fastq.gz" : '' + aligned = save_aligned ? "--al-conc-gz ${prefix}.mapped.fastq.gz" : '' + } + def sam_output = sam_format ? "--out-fmt 'sam'" : '' + """ + tar -xf $db + centrifuge \\ + -x $db_name \\ + -p $task.cpus \\ + $paired \\ + --report-file ${prefix}.report.txt \\ + -S ${prefix}.results.txt \\ + $unaligned \\ + $aligned \\ + $sam_output \\ + $args + centrifuge-kreport -x $db_name ${prefix}.results.txt > ${prefix}.kreport.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + centrifuge: \$( centrifuge --version | sed -n 1p | sed 's/^.*centrifuge-class version //') + END_VERSIONS + """ +} diff --git a/modules/centrifuge/meta.yml b/modules/centrifuge/meta.yml new file mode 100644 index 00000000..3adf0e23 --- /dev/null +++ b/modules/centrifuge/meta.yml @@ -0,0 +1,73 @@ +name: centrifuge +description: Classifies metagenomic sequence data +keywords: + - classify + - metagenomics + - fastq + - db +tools: + - centrifuge: + description: Centrifuge is a classifier for metagenomic sequences. + homepage: https://ccb.jhu.edu/software/centrifuge/ + documentation: https://ccb.jhu.edu/software/centrifuge/manual.shtml + doi: 10.1101/gr.210641.116 + licence: ["GPL v3"] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end data, + respectively. + - db: + type: directory + description: Centrifuge database in .tar.gz format + pattern: "*.tar.gz" + - save_unaligned: + type: value + description: If true unmapped fastq files are saved + - save_aligned: + type: value + description: If true mapped fastq files are saved +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - report: + type: file + description: | + File containing a classification summary + pattern: "*.{report.txt}" + - results: + type: file + description: | + File containing classification results + pattern: "*.{results.txt}" + - kreport: + type: file + description: | + File containing kraken-style report from centrifuge + out files. 
+ pattern: "*.{kreport.txt}" + - fastq_unmapped: + type: file + description: Unmapped fastq files + pattern: "*.unmapped.fastq.gz" + - fastq_mapped: + type: file + description: Mapped fastq files + pattern: "*.mapped.fastq.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@sofstam" + - "@jfy133" + - "@sateeshperi" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 48c3bb7d..ea17ce2e 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -202,6 +202,10 @@ bedtools/subtract: - modules/bedtools/subtract/** - tests/modules/bedtools/subtract/** +biobambam/bammarkduplicates2: + - modules/biobambam/bammarkduplicates2/** + - tests/modules/biobambam/bammarkduplicates2/** + biscuit/align: - modules/biscuit/index/** - modules/biscuit/align/** @@ -245,10 +249,6 @@ biscuit/vcf2bed: - modules/biscuit/vcf2bed/** - tests/modules/biscuit/vcf2bed/** -biobambam/bammarkduplicates2: - - modules/biobambam/bammarkduplicates2/** - - tests/modules/biobambam/bammarkduplicates2/** - bismark/align: - modules/bismark/align/** - modules/bismark/genomepreparation/** @@ -379,6 +379,10 @@ cellranger/mkref: - modules/cellranger/gtf/** - tests/modules/cellranger/gtf/** +centrifuge: + - modules/centrifuge/** + - tests/modules/centrifuge/** + checkm/lineagewf: - modules/checkm/lineagewf/** - tests/modules/checkm/lineagewf/** diff --git a/tests/modules/centrifuge/main.nf b/tests/modules/centrifuge/main.nf new file mode 100644 index 00000000..a8eb2fcb --- /dev/null +++ b/tests/modules/centrifuge/main.nf @@ -0,0 +1,33 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CENTRIFUGE } from '../../../modules/centrifuge/main.nf' + +workflow test_centrifuge_single_end { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + db = file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz", checkIfExists: true) + save_unaligned = true + save_aligned = false + sam_format = false + + CENTRIFUGE ( input, db, save_unaligned, save_aligned, sam_format ) + +} + +workflow test_centrifuge_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + db = file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz", checkIfExists: true) + save_unaligned = true + save_aligned = false + sam_format = false + + CENTRIFUGE ( input, db, save_unaligned, save_aligned, sam_format ) + + +} diff --git a/tests/modules/centrifuge/nextflow.config b/tests/modules/centrifuge/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/centrifuge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/centrifuge/test.yml b/tests/modules/centrifuge/test.yml new file mode 100644 index 00000000..a7b4360b --- /dev/null +++ b/tests/modules/centrifuge/test.yml @@ -0,0 +1,22 @@ +- name: centrifuge test_centrifuge_single_end + command: nextflow run tests/modules/centrifuge -entry test_centrifuge_single_end -c tests/config/nextflow.config + tags: + - centrifuge + 
files: + - path: output/centrifuge/test.kreport.txt + - path: output/centrifuge/test.report.txt + - path: output/centrifuge/test.results.txt + - path: output/centrifuge/test.unmapped.fastq.gz + - path: output/centrifuge/versions.yml + +- name: centrifuge test_centrifuge_paired_end + command: nextflow run tests/modules/centrifuge -entry test_centrifuge_paired_end -c tests/config/nextflow.config + tags: + - centrifuge + files: + - path: output/centrifuge/test.kreport.txt + - path: output/centrifuge/test.report.txt + - path: output/centrifuge/test.results.txt + - path: output/centrifuge/test.unmapped.fastq.1.gz + - path: output/centrifuge/test.unmapped.fastq.2.gz + - path: output/centrifuge/versions.yml From 15c7190e2271de7ff347940460a484f69e27a106 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Fri, 18 Mar 2022 16:21:55 +0100 Subject: [PATCH 046/283] Deeparg singularity container fix 2 (#1406) * fix: remove left-over unnecessary code * Switch to more portable solution for singularity container issue by using bind paths * Fix input collision of dummy files * Repalce dummy with which bash * Remove dummy usage from tests * Apply suggestions from code review * Fix singularity typo --- modules/deeparg/downloaddata/main.nf | 6 +++--- modules/deeparg/downloaddata/meta.yml | 2 +- modules/deeparg/predict/main.nf | 8 ++++---- tests/modules/deeparg/downloaddata/main.nf | 2 +- tests/modules/deeparg/predict/main.nf | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/deeparg/downloaddata/main.nf b/modules/deeparg/downloaddata/main.nf index 78208d21..4e007f14 100644 --- a/modules/deeparg/downloaddata/main.nf +++ b/modules/deeparg/downloaddata/main.nf @@ -8,10 +8,10 @@ process DEEPARG_DOWNLOADDATA { 'https://depot.galaxyproject.org/singularity/deeparg:1.0.2--pyhdfd78af_1' : 'quay.io/biocontainers/deeparg:1.0.2--pyhdfd78af_1' }" /* - We have to force singularity to run with --fakeroot to allow reading of a problematic file with borked read-write permissions in an upstream dependency (theanos). - This flag may not be available on all systems and may be considered a security problem. so please document and /or warn for this in your pipeline! + We have to force singularity to run with -B to allow reading of a problematic file with borked read-write permissions in an upstream dependency (theanos). + Original report: https://github.com/nf-core/funcscan/issues/23 */ - containerOptions { "${workflow.containerEngine}" == 'singularity' ? '--fakeroot' : '' } + containerOptions { "${workflow.containerEngine}" == 'singularity' ? '-B $(which bash):/usr/local/lib/python2.7/site-packages/Theano-0.8.2-py2.7.egg-info/PKG-INFO' : '' } input: diff --git a/modules/deeparg/downloaddata/meta.yml b/modules/deeparg/downloaddata/meta.yml index 352999e2..b6b1881e 100644 --- a/modules/deeparg/downloaddata/meta.yml +++ b/modules/deeparg/downloaddata/meta.yml @@ -17,7 +17,7 @@ tools: licence: ["MIT"] input: - - none: There is no input. This module downloads a pre-built database for use with deepARG. + - none: No input required for download module. output: - versions: diff --git a/modules/deeparg/predict/main.nf b/modules/deeparg/predict/main.nf index 05cee2f8..7bb4f935 100644 --- a/modules/deeparg/predict/main.nf +++ b/modules/deeparg/predict/main.nf @@ -6,13 +6,13 @@ process DEEPARG_PREDICT { conda (params.enable_conda ? "bioconda::deeparg=1.0.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity//deeparg:1.0.2--pyhdfd78af_1' : + 'https://depot.galaxyproject.org/singularity/deeparg:1.0.2--pyhdfd78af_1' : 'quay.io/biocontainers/deeparg:1.0.2--pyhdfd78af_1' }" /* - We have to force singularity to run with --fakeroot to allow reading of a problematic file with borked read-write permissions in an upstream dependency (theanos). - This flag may not be available on all systems and may be considered a security problem. so please document and /or warn for this in your pipeline! + We have to force singularity to run with -B to allow reading of a problematic file with borked read-write permissions in an upstream dependency (theanos). + Original report: https://github.com/nf-core/funcscan/issues/23 */ - containerOptions { "${workflow.containerEngine}" == 'singularity' ? '--fakeroot' : '' } + containerOptions { "${workflow.containerEngine}" == 'singularity' ? '-B $(which bash):/usr/local/lib/python2.7/site-packages/Theano-0.8.2-py2.7.egg-info/PKG-INFO' : '' } input: tuple val(meta), path(fasta), val(model) diff --git a/tests/modules/deeparg/downloaddata/main.nf b/tests/modules/deeparg/downloaddata/main.nf index ed2d48bb..1074b0bc 100644 --- a/tests/modules/deeparg/downloaddata/main.nf +++ b/tests/modules/deeparg/downloaddata/main.nf @@ -5,5 +5,5 @@ nextflow.enable.dsl = 2 include { DEEPARG_DOWNLOADDATA } from '../../../../modules/deeparg/downloaddata/main.nf' workflow test_deeparg_downloaddata { - DEEPARG_DOWNLOADDATA () + DEEPARG_DOWNLOADDATA ( ) } diff --git a/tests/modules/deeparg/predict/main.nf b/tests/modules/deeparg/predict/main.nf index 2758ab58..2ada2374 100644 --- a/tests/modules/deeparg/predict/main.nf +++ b/tests/modules/deeparg/predict/main.nf @@ -13,7 +13,7 @@ workflow test_deeparg_predict { 'LS' ] - DEEPARG_DOWNLOADDATA() + DEEPARG_DOWNLOADDATA( ) DEEPARG_PREDICT ( input, DEEPARG_DOWNLOADDATA.out.db ) } From f425aa3cea10015fe9b345b9d6dcc2336b53155f Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 18 Mar 2022 20:21:41 +0000 Subject: [PATCH 047/283] Add outputs for umitools dedup summary stats (#1422) --- modules/umitools/dedup/main.nf | 13 +++++++++---- modules/umitools/dedup/meta.yml | 12 ++++++++++++ 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/modules/umitools/dedup/main.nf b/modules/umitools/dedup/main.nf index 1e46a612..dfcbcf2f 100644 --- a/modules/umitools/dedup/main.nf +++ b/modules/umitools/dedup/main.nf @@ -11,8 +11,11 @@ process UMITOOLS_DEDUP { tuple val(meta), path(bam), path(bai) output: - tuple val(meta), path("*.bam"), emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("*.bam") , emit: bam + tuple val(meta), path("*edit_distance.tsv"), emit: tsv_edit_distance + tuple val(meta), path("*per_umi.tsv") , emit: tsv_per_umi + tuple val(meta), path("*per_position.tsv") , emit: tsv_umi_per_position + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -20,11 +23,13 @@ process UMITOOLS_DEDUP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def paired = meta.single_end ? "" : "--paired" + def paired = meta.single_end ? 
"" : "--paired" """ - umi_tools dedup \\ + umi_tools \\ + dedup \\ -I $bam \\ -S ${prefix}.bam \\ + --output-stats $prefix \\ $paired \\ $args diff --git a/modules/umitools/dedup/meta.yml b/modules/umitools/dedup/meta.yml index 2038b40d..eee8952f 100644 --- a/modules/umitools/dedup/meta.yml +++ b/modules/umitools/dedup/meta.yml @@ -36,6 +36,18 @@ output: type: file description: BAM file with deduplicated UMIs. pattern: "*.{bam}" + - tsv_edit_distance: + type: file + description: Reports the (binned) average edit distance between the UMIs at each position. + pattern: "*edit_distance.tsv" + - tsv_per_umi: + type: file + description: UMI-level summary statistics. + pattern: "*per_umi.tsv" + - tsv_umi_per_position: + type: file + description: Tabulates the counts for unique combinations of UMI and position. + pattern: "*per_position.tsv" - versions: type: file description: File containing software versions From 16096aba179d979f3b98bb412dea6542c14465ee Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Sat, 19 Mar 2022 09:06:21 -0600 Subject: [PATCH 048/283] Update test.yml (#1419) --- tests/modules/roary/test.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/modules/roary/test.yml b/tests/modules/roary/test.yml index d844a430..b6f02593 100644 --- a/tests/modules/roary/test.yml +++ b/tests/modules/roary/test.yml @@ -28,12 +28,12 @@ - path: output/roary/results/gene_presence_absence.csv contains: ['"Gene","Non-unique Gene name","Annotation","No. isolates","No. sequences"'] - path: output/roary/results/number_of_conserved_genes.Rtab - contains: ["279"] + contains: ["2"] - path: output/roary/results/number_of_genes_in_pan_genome.Rtab - contains: ["279"] + contains: ["2"] - path: output/roary/results/number_of_new_genes.Rtab - contains: ["279"] + contains: ["2"] - path: output/roary/results/number_of_unique_genes.Rtab - contains: ["279"] + contains: ["2"] - path: output/roary/results/summary_statistics.txt md5sum: 3921b5445df6a7ed59408119b8860a58 From e69a4ce4ed0e3c858f112ffe70951eedf253a3e6 Mon Sep 17 00:00:00 2001 From: Mahesh Binzer-Panchal Date: Sat, 19 Mar 2022 19:38:37 +0100 Subject: [PATCH 049/283] Add pytest.ini (#1401) --- pytest.ini | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 pytest.ini diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 00000000..02a8acac --- /dev/null +++ b/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +filterwarnings = + ignore::pytest.PytestRemovedIn8Warning:_pytest.nodes:140 From 5297d27fbf50b7aa5a37cce9b85c7aac3ff7c4ff Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Mon, 21 Mar 2022 10:39:34 +0100 Subject: [PATCH 050/283] update svdb to version 2.5.2 (#1390) * update svdb version and tests * update link --- modules/svdb/merge/main.nf | 6 +-- modules/svdb/query/main.nf | 55 ++++++++++++++++++++---- modules/svdb/query/meta.yml | 14 +++++- tests/config/test_data.config | 1 + tests/modules/svdb/query/main.nf | 26 ++++++++++- tests/modules/svdb/query/nextflow.config | 4 -- tests/modules/svdb/query/test.yml | 10 ++++- 7 files changed, 97 insertions(+), 19 deletions(-) diff --git a/modules/svdb/merge/main.nf b/modules/svdb/merge/main.nf index 505e2c0b..58aef652 100644 --- a/modules/svdb/merge/main.nf +++ b/modules/svdb/merge/main.nf @@ -2,10 +2,10 @@ process SVDB_MERGE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::svdb=2.5.0" : null) + conda (params.enable_conda ? 
"bioconda::svdb=2.5.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/svdb:2.5.0--py39hcbe4a3b_0': - 'quay.io/biocontainers/svdb:2.5.0--py39hcbe4a3b_0' }" + 'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0': + 'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }" input: tuple val(meta), path(vcfs) diff --git a/modules/svdb/query/main.nf b/modules/svdb/query/main.nf index 292fe4ce..37ce432c 100644 --- a/modules/svdb/query/main.nf +++ b/modules/svdb/query/main.nf @@ -2,36 +2,73 @@ process SVDB_QUERY { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::svdb=2.5.0" : null) + conda (params.enable_conda ? "bioconda::svdb=2.5.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/svdb:2.5.0--py39hcbe4a3b_0': - 'quay.io/biocontainers/svdb:2.5.0--py39hcbe4a3b_0' }" + 'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0': + 'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }" input: tuple val(meta), path(vcf) - path (vcf_db) + val(in_occs) + val(in_frqs) + val(out_occs) + val(out_frqs) + path (vcf_dbs) output: - tuple val(meta), path("*_ann_svdbq.vcf"), emit: vcf + tuple val(meta), path("*_query.vcf"), emit: vcf path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def in_occ = "" + def in_frq = "" + def out_occ = "" + def out_frq = "" + if (in_occs) { + in_occ = "--in_occ ${in_occs.join(',')}" + } + if (in_frqs) { + in_frq = "--in_frq ${in_frqs.join(',')}" + } + if (out_occs) { + out_occ = "--out_occ ${out_occs.join(',')}" + } + if (out_frqs) { + out_frq = "--out_frq ${out_frqs.join(',')}" + } + """ svdb \\ --query \\ + $in_occ \\ + $in_frq \\ + $out_occ \\ + $out_frq \\ $args \\ - --db $vcf_db \\ + --db ${vcf_dbs.join(',')} \\ --query_vcf $vcf \\ - >${prefix}_ann_svdbq.vcf + --prefix ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": svdb: \$( echo \$(svdb) | head -1 | sed 's/usage: SVDB-\\([0-9]\\.[0-9]\\.[0-9]\\).*/\\1/' ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_query.vcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + svdb: \$( echo \$(svdb) | head -1 | sed 's/usage: SVDB-\\([0-9]\\.[0-9]\\.[0-9]\\).*/\\1/' ) + END_VERSIONS + """ + } diff --git a/modules/svdb/query/meta.yml b/modules/svdb/query/meta.yml index e2a9e456..57e67e15 100644 --- a/modules/svdb/query/meta.yml +++ b/modules/svdb/query/meta.yml @@ -15,6 +15,12 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] + - in_occs: + type: list + description: A list of allele count tags + - in_frqs: + type: list + description: A list of allele frequency tags - vcf: type: file description: query vcf file @@ -34,10 +40,16 @@ output: type: file description: File containing software versions pattern: "versions.yml" + - out_occs: + type: list + description: A list of allele count tags + - out_frqs: + type: list + description: A list of allele frequency tags - vcf: type: file description: Annotated output VCF file - pattern: "*_ann_svdbq.vcf" + pattern: "*_query.vcf" authors: - "@ramprasadn" diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 0d61d1e9..45732f47 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -140,6 +140,7 @@ params { syntheticvcf_short_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.vcf.gz.tbi" syntheticvcf_short_score = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/syntheticvcf_short.score" gnomad_r2_1_1_sv_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1-sv.vcf.gz" + gnomad2_r2_1_1_sv_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD2.r2.1.1-sv.vcf.gz" hapmap_3_3_hg38_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz" hapmap_3_3_hg38_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/hapmap_3.3.hg38.vcf.gz.tbi" diff --git a/tests/modules/svdb/query/main.nf b/tests/modules/svdb/query/main.nf index 972f99e1..c014320f 100644 --- a/tests/modules/svdb/query/main.nf +++ b/tests/modules/svdb/query/main.nf @@ -14,5 +14,29 @@ workflow test_svdb_query { file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_sv_vcf_gz'], checkIfExists: true) ] - SVDB_QUERY ( input, vcf_db ) + in_occs = ['AC'] + in_frqs = ['AF'] + out_occs = ['gnomad_svAC'] + out_frqs = ['gnomad_svAF'] + + SVDB_QUERY ( input, in_occs, in_frqs, out_occs, out_frqs, vcf_db ) +} + +workflow test_svdb_query_multiple { + + input = [ [ id:'test' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_sv_vcf'], checkIfExists: true) ] + ] + + vcf_db = [ + file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_sv_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['gnomad2_r2_1_1_sv_vcf_gz'], checkIfExists: true) + ] + + in_occs = ['AC','AC'] + in_frqs = ['AF','AF'] + out_occs = ['gnomad_svAC','gnomad_svAC'] + out_frqs = ['gnomad_svAF','gnomad_svAF'] + + SVDB_QUERY ( input, in_occs, in_frqs, out_occs, out_frqs, vcf_db ) } diff --git a/tests/modules/svdb/query/nextflow.config b/tests/modules/svdb/query/nextflow.config index 2a6c9d90..8730f1c4 100644 --- a/tests/modules/svdb/query/nextflow.config +++ b/tests/modules/svdb/query/nextflow.config @@ -2,8 +2,4 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - withName: SVDB_QUERY { - ext.args = '--in_occ AC --out_occ gnomad_svAC --in_frq AF --out_frq gnomad_svAF' - } - } diff --git a/tests/modules/svdb/query/test.yml b/tests/modules/svdb/query/test.yml index b95ecafe..a7cbe3a6 100644 --- a/tests/modules/svdb/query/test.yml +++ b/tests/modules/svdb/query/test.yml @@ -4,4 +4,12 @@ - svdb - svdb/query files: - - path: output/svdb/test_ann_svdbq.vcf + - path: output/svdb/test_query.vcf + +- name: svdb query multiple + command: nextflow run ./tests/modules/svdb/query -entry test_svdb_query_multiple -c 
./tests/config/nextflow.config -c ./tests/modules/svdb/query/nextflow.config + tags: + - svdb + - svdb/query + files: + - path: output/svdb/test_query.vcf From 8c0089785381249a38a6b2feb02c41beb314a8fa Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 21 Mar 2022 11:26:12 +0100 Subject: [PATCH 051/283] Add AdapterRemovalFixPrefix (#1424) * Add AdapterRemovalFixPrefix * Prettifying Co-authored-by: Alexander Peltzer --- modules/adapterremovalfixprefix/main.nf | 38 ++++++++++++++++ modules/adapterremovalfixprefix/meta.yml | 43 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/adapterremovalfixprefix/main.nf | 19 ++++++++ .../adapterremovalfixprefix/nextflow.config | 9 ++++ .../modules/adapterremovalfixprefix/test.yml | 9 ++++ 6 files changed, 122 insertions(+) create mode 100644 modules/adapterremovalfixprefix/main.nf create mode 100644 modules/adapterremovalfixprefix/meta.yml create mode 100644 tests/modules/adapterremovalfixprefix/main.nf create mode 100644 tests/modules/adapterremovalfixprefix/nextflow.config create mode 100644 tests/modules/adapterremovalfixprefix/test.yml diff --git a/modules/adapterremovalfixprefix/main.nf b/modules/adapterremovalfixprefix/main.nf new file mode 100644 index 00000000..c0137fb4 --- /dev/null +++ b/modules/adapterremovalfixprefix/main.nf @@ -0,0 +1,38 @@ +def VERSION = '0.05' + +process ADAPTERREMOVALFIXPREFIX { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::adapterremovalfixprefix=0.0.5" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/adapterremovalfixprefix:0.0.5--hdfd78af_2': + 'quay.io/biocontainers/adapterremovalfixprefix:0.0.5--hdfd78af_2' }" + + input: + tuple val(meta), path(fastq) + + output: + tuple val(meta), path("*.fq.gz"), emit: fixed_fastq + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if ("$fastq" == "${prefix}.fq.gz") error "Input and output names are the same, set prefix in module configuration to disambiguate!" + + """ + AdapterRemovalFixPrefix \\ + $fastq \\ + $args \\ + | gzip > ${prefix}.fq.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + adapterremovalfixprefix: $VERSION + END_VERSIONS + """ +} diff --git a/modules/adapterremovalfixprefix/meta.yml b/modules/adapterremovalfixprefix/meta.yml new file mode 100644 index 00000000..db69bc07 --- /dev/null +++ b/modules/adapterremovalfixprefix/meta.yml @@ -0,0 +1,43 @@ +name: adapterremovalfixprefix +description: Fixes prefixes from AdapterRemoval2 output to make sure no clashing read names are in the output. For use with DeDup. +keywords: + - adapterremoval + - fastq + - dedup +tools: + - adapterremovalfixprefix: + description: Fixes adapter removal prefixes to make sure no clashing read names are in the output. + homepage: https://github.com/apeltzer/AdapterRemovalFixPrefix + documentation: None + tool_dev_url: https://github.com/apeltzer/AdapterRemovalFixPrefix + doi: "10.1186/s13059-016-0918-z" + licence: ["GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fastq: + type: file + description: FASTQ file from AdapterRemoval2 + pattern: "*.{fq.gz,fastq.gz}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fixed_fastq: + type: file + description: FASTQ file with fixed read prefixes for DeDup + pattern: "*.{fq.gz}" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index ea17ce2e..20ca238f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -14,6 +14,10 @@ adapterremoval: - modules/adapterremoval/** - tests/modules/adapterremoval/** +adapterremovalfixprefix: + - modules/adapterremovalfixprefix/** + - tests/modules/adapterremovalfixprefix/** + agrvate: - modules/agrvate/** - tests/modules/agrvate/** diff --git a/tests/modules/adapterremovalfixprefix/main.nf b/tests/modules/adapterremovalfixprefix/main.nf new file mode 100644 index 00000000..863a7ca8 --- /dev/null +++ b/tests/modules/adapterremovalfixprefix/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ADAPTERREMOVAL } from '../../../modules/adapterremoval/main.nf' +include { ADAPTERREMOVALFIXPREFIX } from '../../../modules/adapterremovalfixprefix/main.nf' + +workflow test_adapterremovalfixprefix { + + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + ADAPTERREMOVAL ( input, [] ) + ADAPTERREMOVALFIXPREFIX ( ADAPTERREMOVAL.out.collapsed ) +} diff --git a/tests/modules/adapterremovalfixprefix/nextflow.config b/tests/modules/adapterremovalfixprefix/nextflow.config new file mode 100644 index 00000000..dcfdc48c --- /dev/null +++ b/tests/modules/adapterremovalfixprefix/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: ADAPTERREMOVAL { + ext.args = "--collapse" + } + +} diff --git a/tests/modules/adapterremovalfixprefix/test.yml b/tests/modules/adapterremovalfixprefix/test.yml new file mode 100644 index 00000000..4ef6a41e --- /dev/null +++ b/tests/modules/adapterremovalfixprefix/test.yml @@ -0,0 +1,9 @@ +- name: adapterremovalfixprefix test_adapterremovalfixprefix + command: nextflow run tests/modules/adapterremovalfixprefix -entry test_adapterremovalfixprefix -c tests/config/nextflow.config + tags: + - adapterremovalfixprefix + files: + - path: output/adapterremovalfixprefix/test.fq.gz + md5sum: ff956de3532599a56c3efe5369f0953f + - path: output/adapterremovalfixprefix/versions.yml + md5sum: 983cb58079bf015c1d489a7e48261746 From e080f4c8acf5760039ed12ec1f206170f3f9a918 Mon Sep 17 00:00:00 2001 From: "James A. 
Fellows Yates" Date: Mon, 21 Mar 2022 14:49:28 +0100 Subject: [PATCH 052/283] Untar unzip meta (#1408) * fix: remove left-over unnecessary code * Adds support for meta lists for unzip and untar * Fix test inputs * Update all modules to support extraction of decompressed file from untar/unzip new meta + file tuple * Update all modules to support extraction of decompressed file from untar/unzip new meta + file tuple * Fix MALTEXTRACT/AMPS * Fix further modules * Fix cellranger * Apply suggestions from code review Co-authored-by: Harshil Patel --- modules/untar/main.nf | 10 ++++----- modules/untar/meta.yml | 10 +++++++++ modules/unzip/main.nf | 6 ++--- modules/unzip/meta.yml | 10 +++++++++ tests/modules/amps/main.nf | 15 ++++++++----- tests/modules/artic/minion/main.nf | 4 ++-- tests/modules/cellranger/mkfastq/main.nf | 8 +++---- tests/modules/controlfreec/main.nf | 4 ++-- .../gatk4/createsomaticpanelofnormals/main.nf | 4 ++-- tests/modules/gatk4/genomicsdbimport/main.nf | 8 +++---- tests/modules/gatk4/genotypegvcfs/main.nf | 16 +++++++------- tests/modules/kraken2/kraken2/main.nf | 10 ++++----- tests/modules/last/lastal/main.nf | 8 +++---- tests/modules/last/train/main.nf | 4 ++-- tests/modules/malt/build_test/main.nf | 8 +++---- tests/modules/malt/run/main.nf | 8 +++---- tests/modules/maltextract/main.nf | 20 ++++++++++------- tests/modules/maltextract/test.yml | 2 -- tests/modules/metaphlan3/main.nf | 18 +++++++-------- tests/modules/sratools/fasterqdump/main.nf | 8 +++---- tests/modules/untar/main.nf | 5 ++++- tests/modules/unzip/main.nf | 5 ++++- tests/modules/vcfanno/main.nf | 22 +++++++++---------- 23 files changed, 121 insertions(+), 92 deletions(-) diff --git a/modules/untar/main.nf b/modules/untar/main.nf index bbae948a..dc43fb78 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -8,19 +8,19 @@ process UNTAR { 'biocontainers/biocontainers:v1.2.0_cv1' }" input: - path archive + tuple val(meta), path(archive) output: - path "$untar" , emit: untar - path "versions.yml", emit: versions + tuple val(meta), path("$untar"), emit: untar + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '' + def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' - untar = archive.toString() - '.tar.gz' + untar = archive.toString() - '.tar.gz' """ tar \\ -xzvf \\ diff --git a/modules/untar/meta.yml b/modules/untar/meta.yml index e877a97c..d426919b 100644 --- a/modules/untar/meta.yml +++ b/modules/untar/meta.yml @@ -10,11 +10,21 @@ tools: documentation: https://www.gnu.org/software/tar/manual/ licence: ["GPL-3.0-or-later"] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - archive: type: file description: File to be untar pattern: "*.{tar}.{gz}" output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] - untar: type: file description: diff --git a/modules/unzip/main.nf b/modules/unzip/main.nf index 1ce4b89b..8476a2eb 100644 --- a/modules/unzip/main.nf +++ b/modules/unzip/main.nf @@ -8,11 +8,11 @@ process UNZIP { 'quay.io/biocontainers/p7zip:15.09--h2d50403_4' }" input: - path archive + tuple val(meta), path(archive) output: - path "${archive.baseName}/", emit: unzipped_archive - path "versions.yml" , emit: versions + tuple val(meta), path("${archive.baseName}/"), emit: unzipped_archive + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/modules/unzip/meta.yml b/modules/unzip/meta.yml index 79361527..f924bfb4 100644 --- a/modules/unzip/meta.yml +++ b/modules/unzip/meta.yml @@ -12,12 +12,22 @@ tools: licence: ["LGPL-2.1-or-later"] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - archive: type: file description: ZIP file pattern: "*.zip" output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - unzipped_archive: type: directory description: Directory contents of the unzipped archive diff --git a/tests/modules/amps/main.nf b/tests/modules/amps/main.nf index 15572096..7c4969e9 100644 --- a/tests/modules/amps/main.nf +++ b/tests/modules/amps/main.nf @@ -15,18 +15,21 @@ workflow test_amps { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gff = [] seq_type = "DNA" - map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) - input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + map_db = [ [], file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] mode = "BlastN" taxon_list = file(params.test_data['sarscov2']['genome']['taxon_list_txt'], checkIfExists: true) - ncbi_dir = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) - filter = "def_anc" + ncbi_dir = [ [], file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) ] UNZIP_MALT ( map_db ) UNZIP_MALTEXTRACT ( ncbi_dir ) - MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive.map{ it[1] } ) MALT_RUN ( input, mode, MALT_BUILD.out.index ) - MALTEXTRACT ( MALT_RUN.out.rma6, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive) + ch_input_to_maltextract = MALT_RUN.out.rma6.map{ it[1] } + MALTEXTRACT ( ch_input_to_maltextract, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive.map{ it[1] }) AMPS ( MALTEXTRACT.out.results, taxon_list, filter ) } diff --git a/tests/modules/artic/minion/main.nf b/tests/modules/artic/minion/main.nf index 3bda2ffc..b70b549e 100644 --- a/tests/modules/artic/minion/main.nf +++ b/tests/modules/artic/minion/main.nf @@ -11,12 +11,12 @@ workflow test_artic_minion { [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true) ] - fast5_tar = file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) + fast5_tar = [ [], file(params.test_data['sarscov2']['nanopore']['fast5_tar_gz'], checkIfExists: true) 
] sequencing_summary = file(params.test_data['sarscov2']['nanopore']['test_sequencing_summary'], checkIfExists: true) fasta = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.reference.fasta', checkIfExists: true) bed = file('https://github.com/artic-network/primer-schemes/raw/master/nCoV-2019/V3/nCoV-2019.primer.bed', checkIfExists: true) - fast5_dir = UNTAR ( fast5_tar ).untar + fast5_dir = UNTAR ( fast5_tar ).untar.map{ it[1] } ARTIC_MINION ( input, fast5_dir, sequencing_summary, fasta, bed, [], '', 'nCoV-2019', '3') } diff --git a/tests/modules/cellranger/mkfastq/main.nf b/tests/modules/cellranger/mkfastq/main.nf index 5e594fd1..7bad0fec 100644 --- a/tests/modules/cellranger/mkfastq/main.nf +++ b/tests/modules/cellranger/mkfastq/main.nf @@ -8,19 +8,19 @@ include { CELLRANGER_MKFASTQ } from '../../../../modules/cellranger/mkfastq/main workflow test_cellranger_mkfastq_simple { simple_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-simple-1.2.0.csv", checkIfExists: true) - tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + tiny_bcl = [ [], file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) ] UNTAR ( tiny_bcl ) - CELLRANGER_MKFASTQ ( UNTAR.out.untar, simple_csv) + CELLRANGER_MKFASTQ ( UNTAR.out.untar.map{ it[1] }, simple_csv) } workflow test_cellranger_mkfastq_illumina { samplesheet_csv = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-samplesheet-1.2.0.csv", checkIfExists: true) - tiny_bcl = file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) + tiny_bcl = [ [], file("https://cf.10xgenomics.com/supp/cell-exp/cellranger-tiny-bcl-1.2.0.tar.gz", checkIfExists: true) ] UNTAR ( tiny_bcl ) - CELLRANGER_MKFASTQ ( UNTAR.out.untar, samplesheet_csv) + CELLRANGER_MKFASTQ ( UNTAR.out.untar.map{ it[1] }, samplesheet_csv) } diff --git a/tests/modules/controlfreec/main.nf b/tests/modules/controlfreec/main.nf index 576a845c..247f9887 100644 --- a/tests/modules/controlfreec/main.nf +++ b/tests/modules/controlfreec/main.nf @@ -19,7 +19,7 @@ workflow test_controlfreec { dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) - chrfiles = file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) UNTAR(chrfiles) @@ -29,7 +29,7 @@ workflow test_controlfreec { [], dbsnp, dbsnp_tbi, - UNTAR.out.untar, + UNTAR.out.untar.map{ it[1] }, [], target_bed, [] diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf index 5e1d1904..3316f73a 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/tests/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -6,12 +6,12 @@ include { UNTAR } from '../../../../modules/untar/main.nf' include { GATK4_CREATESOMATICPANELOFNORMALS } from '../../../../modules/gatk4/createsomaticpanelofnormals/main.nf' workflow test_gatk4_createsomaticpanelofnormals { - db = 
file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + db = [[], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( db ) input = Channel.of([ id:'test']) - .combine(UNTAR.out.untar) + .combine(UNTAR.out.untar.map{ it[1] }) fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fastaidx = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) diff --git a/tests/modules/gatk4/genomicsdbimport/main.nf b/tests/modules/gatk4/genomicsdbimport/main.nf index 417a08a4..6d990f4e 100644 --- a/tests/modules/gatk4/genomicsdbimport/main.nf +++ b/tests/modules/gatk4/genomicsdbimport/main.nf @@ -22,7 +22,7 @@ workflow test_gatk4_genomicsdbimport_create_genomicsdb { } workflow test_gatk4_genomicsdbimport_get_intervalslist { - db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + db = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( db ) @@ -31,7 +31,7 @@ workflow test_gatk4_genomicsdbimport_get_intervalslist { [] , [] , [] ]) - .combine(UNTAR.out.untar) + .combine(UNTAR.out.untar.map{ it[1] }) run_intlist = true run_updatewspace = false @@ -41,7 +41,7 @@ workflow test_gatk4_genomicsdbimport_get_intervalslist { } workflow test_gatk4_genomicsdbimport_update_genomicsdb { - db = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + db = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( db ) @@ -50,7 +50,7 @@ workflow test_gatk4_genomicsdbimport_update_genomicsdb { file( params.test_data['homo_sapiens']['illumina']['test2_genome_vcf_gz_tbi'] , checkIfExists: true) , [] , [] ]) - .combine(UNTAR.out.untar) + .combine(UNTAR.out.untar.map{ it[1] }) run_intlist = false run_updatewspace = true diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf index 6191df26..a5ae8d46 100644 --- a/tests/modules/gatk4/genotypegvcfs/main.nf +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -97,10 +97,10 @@ workflow test_gatk4_genotypegvcfs_gendb_input { fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( test_genomicsdb ) - gendb = UNTAR.out.untar.collect() + gendb = UNTAR.out.untar.map{ it[1] }.collect() gendb.add([]) gendb.add([]) @@ -119,10 +119,10 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) - test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( test_genomicsdb ) - gendb = UNTAR.out.untar.collect() + gendb = UNTAR.out.untar.map{ it[1] }.collect() gendb.add([]) gendb.add([]) input = 
Channel.of([ id:'test' ]).combine(gendb) @@ -137,10 +137,10 @@ workflow test_gatk4_genotypegvcfs_gendb_input_intervals { fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( test_genomicsdb ) - gendb = UNTAR.out.untar.collect() + gendb = UNTAR.out.untar.map{ it[1] }.collect() gendb.add([]) gendb.add([file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)]) input = Channel.of([ id:'test' ]).combine(gendb) @@ -158,10 +158,10 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) - test_genomicsdb = file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) + test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] UNTAR ( test_genomicsdb ) - gendb = UNTAR.out.untar.collect() + gendb = UNTAR.out.untar.map{ it[1] }.collect() gendb.add([]) gendb.add([file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)]) input = Channel.of([ id:'test' ]).combine(gendb) diff --git a/tests/modules/kraken2/kraken2/main.nf b/tests/modules/kraken2/kraken2/main.nf index 12399e9e..94f4db95 100644 --- a/tests/modules/kraken2/kraken2/main.nf +++ b/tests/modules/kraken2/kraken2/main.nf @@ -9,10 +9,10 @@ workflow test_kraken2_kraken2_single_end { input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] - db = file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) + db = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ] UNTAR ( db ) - KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar ) + KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] } ) } workflow test_kraken2_kraken2_paired_end { @@ -20,8 +20,8 @@ workflow test_kraken2_kraken2_paired_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - db = file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) - + db = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ] + UNTAR ( db ) - KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar ) + KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] } ) } diff --git a/tests/modules/last/lastal/main.nf b/tests/modules/last/lastal/main.nf index 95c2f917..76e124c3 100644 --- a/tests/modules/last/lastal/main.nf +++ b/tests/modules/last/lastal/main.nf @@ -10,10 +10,10 @@ workflow test_last_lastal_with_dummy_param_file { input = [ [ id:'contigs', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true), [] ] - db = [ file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] + db = [ [], 
file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] UNTAR ( db ) - LAST_LASTAL ( input, UNTAR.out.untar) + LAST_LASTAL ( input, UNTAR.out.untar.map{ it[1] }) } workflow test_last_lastal_with_real_param_file { @@ -21,8 +21,8 @@ workflow test_last_lastal_with_real_param_file { input = [ [ id:'contigs', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['contigs_genome_par'], checkIfExists: true) ] - db = [ file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] + db = [ [], file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] UNTAR ( db ) - LAST_LASTAL ( input, UNTAR.out.untar) + LAST_LASTAL ( input, UNTAR.out.untar.map{ it[1] }) } diff --git a/tests/modules/last/train/main.nf b/tests/modules/last/train/main.nf index 0f280a82..2e10735e 100644 --- a/tests/modules/last/train/main.nf +++ b/tests/modules/last/train/main.nf @@ -7,9 +7,9 @@ include { LAST_TRAIN } from '../../../../modules/last/train/main.nf' workflow test_last_train { - db = [ file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] + db = [ [], file(params.test_data['sarscov2']['genome']['lastdb_tar_gz'], checkIfExists: true) ] input = [ [ id:'contigs' ], // meta map file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) ] UNTAR ( db ) - LAST_TRAIN ( input, UNTAR.out.untar ) + LAST_TRAIN ( input, UNTAR.out.untar.map{ it[1] } ) } diff --git a/tests/modules/malt/build_test/main.nf b/tests/modules/malt/build_test/main.nf index 2542da0c..c314d82c 100644 --- a/tests/modules/malt/build_test/main.nf +++ b/tests/modules/malt/build_test/main.nf @@ -9,18 +9,18 @@ workflow test_malt_build { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) seq_type = "DNA" gff = [] - map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + map_db = [ [], file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) ] UNZIP ( map_db ) - MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive.map{ it[1] } ) } workflow test_malt_build_gff { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) seq_type = "DNA" gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) - map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + map_db = [ [], file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) ] UNZIP ( map_db ) - MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive.map{ it[1] } ) } diff --git a/tests/modules/malt/run/main.nf b/tests/modules/malt/run/main.nf index d92dee71..7501652d 100644 --- a/tests/modules/malt/run/main.nf +++ b/tests/modules/malt/run/main.nf @@ -2,16 +2,16 @@ nextflow.enable.dsl = 2 -include { UNZIP } from '../../../../modules/unzip/main.nf' +include { UNZIP } from '../../../../modules/unzip/main.nf' include { MALT_BUILD } from '../../../../modules/malt/build/main.nf' -include { MALT_RUN } from '../../../../modules/malt/run/main.nf' +include { MALT_RUN } from 
'../../../../modules/malt/run/main.nf' workflow test_malt_run { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gff = file(params.test_data['sarscov2']['genome']['genome_gff3'], checkIfExists: true) seq_type = "DNA" - map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) + map_db = [ [], file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) ] input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) @@ -19,7 +19,7 @@ workflow test_malt_run { mode = "BlastN" UNZIP ( map_db ) - MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP.out.unzipped_archive.map { it[1] } ) MALT_RUN ( input, mode, MALT_BUILD.out.index ) } diff --git a/tests/modules/maltextract/main.nf b/tests/modules/maltextract/main.nf index 8e0a2241..ed1e32c9 100644 --- a/tests/modules/maltextract/main.nf +++ b/tests/modules/maltextract/main.nf @@ -4,24 +4,28 @@ nextflow.enable.dsl = 2 include { UNZIP as UNZIP_MALT } from '../../../modules/unzip/main.nf' include { UNZIP as UNZIP_MALTEXTRACT } from '../../../modules/unzip/main.nf' -include { MALT_BUILD } from '../../../modules/malt/build/main.nf' -include { MALT_RUN } from '../../../modules/malt/run/main.nf' -include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' +include { MALT_BUILD } from '../../../modules/malt/build/main.nf' +include { MALT_RUN } from '../../../modules/malt/run/main.nf' +include { MALTEXTRACT } from '../../../modules/maltextract/main.nf' workflow test_maltextract { fastas = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) gff = [] seq_type = "DNA" - map_db = file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) - input = file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + map_db = [ [], file("https://software-ab.informatik.uni-tuebingen.de/download/megan6/megan-nucl-Jan2021.db.zip", checkIfExists: true) ] + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] mode = "BlastN" taxon_list = file(params.test_data['sarscov2']['genome']['taxon_list_txt'], checkIfExists: true) - ncbi_dir = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) + ncbi_dir = [ [], file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) ] UNZIP_MALT ( map_db ) UNZIP_MALTEXTRACT ( ncbi_dir ) - MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive ) + MALT_BUILD ( fastas, seq_type, gff, UNZIP_MALT.out.unzipped_archive.map{ it[1] } ) MALT_RUN ( input, mode, MALT_BUILD.out.index ) - MALTEXTRACT ( MALT_RUN.out.rma6, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive) + ch_input_to_maltextract = MALT_RUN.out.rma6.map{ it[1] } + MALTEXTRACT ( ch_input_to_maltextract, taxon_list, UNZIP_MALTEXTRACT.out.unzipped_archive.map{ it[1] }) } diff --git a/tests/modules/maltextract/test.yml b/tests/modules/maltextract/test.yml index 2440c100..6802fe8b 100644 --- a/tests/modules/maltextract/test.yml +++ b/tests/modules/maltextract/test.yml @@ -3,8 +3,6 @@ tags: - maltextract files: - - path: output/maltextract/results/error.txt - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: 
output/maltextract/results/error.txt - path: output/maltextract/results/log.txt contains: diff --git a/tests/modules/metaphlan3/main.nf b/tests/modules/metaphlan3/main.nf index 3354d2d9..5d2ee1d1 100644 --- a/tests/modules/metaphlan3/main.nf +++ b/tests/modules/metaphlan3/main.nf @@ -12,10 +12,10 @@ workflow test_metaphlan3_single_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] - db = channel.fromPath('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', type: 'dir', checkIfExists: true) + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', checkIfExists: true) ] UNTAR ( db ) - METAPHLAN3 ( input, UNTAR.out.untar ) + METAPHLAN3 ( input, UNTAR.out.untar.map{ it[1] } ) } workflow test_metaphlan3_paired_end { @@ -25,11 +25,10 @@ workflow test_metaphlan3_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - db = channel.fromPath('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', type: 'dir', checkIfExists: true) - + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', checkIfExists: true) ] UNTAR ( db ) - METAPHLAN3 ( input, UNTAR.out.untar ) + METAPHLAN3 ( input, UNTAR.out.untar.map{ it[1] } ) } workflow test_metaphlan3_sam { @@ -38,12 +37,11 @@ workflow test_metaphlan3_sam { [ file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) ] ] - db = channel.fromPath('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', type: 'dir', checkIfExists: true) - + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', checkIfExists: true) ] UNTAR ( db ) SAMTOOLS_VIEW ( input, [] ) - METAPHLAN3 ( SAMTOOLS_VIEW.out.bam, UNTAR.out.untar ) + METAPHLAN3 ( SAMTOOLS_VIEW.out.bam, UNTAR.out.untar.map{ it[1] } ) } workflow test_metaphlan3_fasta { @@ -52,8 +50,8 @@ workflow test_metaphlan3_fasta { [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] ] - db = channel.fromPath('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', type: 'dir', checkIfExists: true) + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/metaphlan_database.tar.gz', checkIfExists: true) ] UNTAR ( db ) - METAPHLAN3 ( input, UNTAR.out.untar ) + METAPHLAN3 ( input, UNTAR.out.untar.map{ it[1] } ) } diff --git a/tests/modules/sratools/fasterqdump/main.nf b/tests/modules/sratools/fasterqdump/main.nf index 2f838fd2..c2b98526 100644 --- a/tests/modules/sratools/fasterqdump/main.nf +++ b/tests/modules/sratools/fasterqdump/main.nf @@ -7,22 +7,22 @@ include { SRATOOLS_FASTERQDUMP } from '../../../../modules/sratools/fasterqdump/ workflow test_sratools_fasterqdump_single_end { - archive = file(params.test_data['sarscov2']['illumina']['SRR13255544_tar_gz'], checkIfExists: true) + archive = [ [], file(params.test_data['sarscov2']['illumina']['SRR13255544_tar_gz'], checkIfExists: true) ] UNTAR ( archive ) def input = Channel.of([ id:'test_single_end', single_end:true ]) - .combine(UNTAR.out.untar) + .combine(UNTAR.out.untar.map{ it[1] }) SRATOOLS_FASTERQDUMP ( input ) } workflow test_sratools_fasterqdump_paired_end 
{ - archive = file(params.test_data['sarscov2']['illumina']['SRR11140744_tar_gz'], checkIfExists: true) + archive = [ [], file(params.test_data['sarscov2']['illumina']['SRR11140744_tar_gz'], checkIfExists: true) ] UNTAR ( archive ) def input = Channel.of([ id:'test_paired_end', single_end:false ]) - .combine(UNTAR.out.untar) + .combine(UNTAR.out.untar.map{ it[1] }) SRATOOLS_FASTERQDUMP ( input ) } diff --git a/tests/modules/untar/main.nf b/tests/modules/untar/main.nf index 056e3ea7..9d6d4c6c 100644 --- a/tests/modules/untar/main.nf +++ b/tests/modules/untar/main.nf @@ -5,7 +5,10 @@ nextflow.enable.dsl = 2 include { UNTAR } from '../../../modules/untar/main.nf' workflow test_untar { - input = file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) + input = [ + [], + file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) + ] UNTAR ( input ) } diff --git a/tests/modules/unzip/main.nf b/tests/modules/unzip/main.nf index 520fe31e..6f7cc030 100644 --- a/tests/modules/unzip/main.nf +++ b/tests/modules/unzip/main.nf @@ -6,7 +6,10 @@ include { UNZIP } from '../../../modules/unzip/main.nf' workflow test_unzip { - archive = file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) + archive = [ + [], + file(params.test_data['sarscov2']['genome']['ncbi_taxmap_zip'], checkIfExists: true) + ] UNZIP ( archive ) } diff --git a/tests/modules/vcfanno/main.nf b/tests/modules/vcfanno/main.nf index 045ffa31..c879ebbd 100644 --- a/tests/modules/vcfanno/main.nf +++ b/tests/modules/vcfanno/main.nf @@ -6,36 +6,36 @@ include { UNTAR } from '../../../modules/untar/main.nf' include { VCFANNO } from '../../../modules/vcfanno/main.nf' workflow test_vcfanno { - - input = [ + + input = [ [ id:'test_compressed', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true) ] - - input_2 = [ [ id:'test_compressed', single_end:false ], // meta map + + input_2 = [ [ id:'test_compressed', single_end:false ], // meta map [] ] toml = file(params.test_data['homo_sapiens']['genome']['vcfanno_toml'], checkIfExists: true) - resource_dir = file(params.test_data['homo_sapiens']['genome']['vcfanno_tar_gz'], checkIfExists: true) + resource_dir = [[], file(params.test_data['homo_sapiens']['genome']['vcfanno_tar_gz'], checkIfExists: true) ] UNTAR ( resource_dir ) - VCFANNO ( input, input_2, toml, UNTAR.out.untar ) + VCFANNO ( input, input_2, toml, UNTAR.out.untar.map{ it[1] } ) } workflow test_vcfanno_uncompressed { input = [ [ id:'test_uncompressed', single_end:false ], // meta map [] ,[] ] - - input_2 = [ + + input_2 = [ [ id:'test_uncompressed', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] toml = file(params.test_data['homo_sapiens']['genome']['vcfanno_toml'], checkIfExists: true) - resource_dir = file(params.test_data['homo_sapiens']['genome']['vcfanno_tar_gz'], checkIfExists: true) + resource_dir = [[], file(params.test_data['homo_sapiens']['genome']['vcfanno_tar_gz'], checkIfExists: true) ] UNTAR ( resource_dir ) - VCFANNO ( input, input_2, toml, UNTAR.out.untar ) -} \ No newline at end of file + VCFANNO ( input, input_2, toml, UNTAR.out.untar.map{ it[1] } ) +} From 9c386c5dd8a42b5cb589cea7244e3960c8dfdacf Mon Sep 17 00:00:00 2001 From: "Moritz E. 
Beber" Date: Tue, 22 Mar 2022 16:40:21 +0100 Subject: [PATCH 053/283] Add bracken (#1423) * feat: add template for Bracken * chore: update version * refactor: change command build * refactor: rename report variable, change quotes * docs: remove refactored input parameter * fix: correctly assign arguments to options * tests: set up single and paired end tests * style: apply prettier * chore: change data sources to official ones * refactor: rename test workflows * tests: use correct input to the new UNTAR module * chore: update md5sums --- modules/bracken/bracken/main.nf | 41 +++++++++++++ modules/bracken/bracken/meta.yml | 45 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 3 + tests/modules/bracken/bracken/main.nf | 57 +++++++++++++++++++ tests/modules/bracken/bracken/nextflow.config | 5 ++ tests/modules/bracken/bracken/test.yml | 43 ++++++++++++++ 7 files changed, 198 insertions(+) create mode 100644 modules/bracken/bracken/main.nf create mode 100644 modules/bracken/bracken/meta.yml create mode 100644 tests/modules/bracken/bracken/main.nf create mode 100644 tests/modules/bracken/bracken/nextflow.config create mode 100644 tests/modules/bracken/bracken/test.yml diff --git a/modules/bracken/bracken/main.nf b/modules/bracken/bracken/main.nf new file mode 100644 index 00000000..2027fd23 --- /dev/null +++ b/modules/bracken/bracken/main.nf @@ -0,0 +1,41 @@ +process BRACKEN_BRACKEN { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::bracken=2.6.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/bracken:2.6.2--py39hc16433a_0': + 'quay.io/biocontainers/bracken:2.6.2--py39hc16433a_0' }" + + input: + tuple val(meta), path(kraken_report) + path database + + output: + tuple val(meta), path(bracken_report), emit: reports + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def threshold = meta.threshold ?: 10 + def taxonomic_level = meta.taxonomic_level ?: 'S' + def read_length = meta.read_length ?: 150 + def args = task.ext.args ?: "-l ${taxonomic_level} -t ${threshold} -r ${read_length}" + def prefix = task.ext.prefix ?: "${meta.id}" + def bracken_version = '2.6.2' + bracken_report = "${prefix}_${taxonomic_level}.tsv" + """ + bracken \\ + ${args} \\ + -d '${database}' \\ + -i '${kraken_report}' \\ + -o '${bracken_report}' + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bracken: ${bracken_version} + END_VERSIONS + """ +} diff --git a/modules/bracken/bracken/meta.yml b/modules/bracken/bracken/meta.yml new file mode 100644 index 00000000..19311618 --- /dev/null +++ b/modules/bracken/bracken/meta.yml @@ -0,0 +1,45 @@ +name: bracken_bracken +description: Re-estimate taxonomic abundance of metagenomic samples analyzed by kraken. +keywords: + - sort +tools: + - bracken: + description: Bracken (Bayesian Reestimation of Abundance with KrakEN) is a highly accurate statistical method that computes the abundance of species in DNA sequences from a metagenomics sample. + homepage: https://ccb.jhu.edu/software/bracken/ + documentation: https://ccb.jhu.edu/software/bracken/index.shtml?t=manual + tool_dev_url: https://github.com/jenniferlu717/Bracken + doi: "10.7717/peerj-cs.104" + licence: ["GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - kraken_report: + type: file + description: TSV file with six columns coming from kraken2 output + pattern: "*.{tsv}" + - database: + type: file + description: Directory containing the kraken2/Bracken files for analysis + pattern: "*" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reports: + type: file + description: TSV output report of the re-estimated abundances + pattern: "*.{tsv}" + +authors: + - "@Midnighter" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 20ca238f..d3282321 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -313,6 +313,10 @@ bowtie2/build: - modules/bowtie2/build/** - tests/modules/bowtie2/build_test/** +bracken/bracken: + - modules/bracken/bracken/** + - tests/modules/bracken/bracken/** + bwa/aln: - modules/bwa/aln/** - tests/modules/bwa/aln/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 45732f47..230e8d43 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -25,6 +25,9 @@ params { kraken2 = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2" kraken2_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2.tar.gz" + kraken2_bracken = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken" + kraken2_bracken_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken.tar.gz" + ncbi_taxmap_zip = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/ncbi_taxmap.zip" taxon_list_txt = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/taxon_list.txt" diff --git a/tests/modules/bracken/bracken/main.nf b/tests/modules/bracken/bracken/main.nf new file mode 100644 index 00000000..f882439b --- /dev/null +++ b/tests/modules/bracken/bracken/main.nf @@ -0,0 +1,57 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { KRAKEN2_KRAKEN2 } from '../../../../modules/kraken2/kraken2/main.nf' +include { BRACKEN_BRACKEN } from '../../../../modules/bracken/bracken/main.nf' + +workflow test_bracken_bracken_single_end_default_args { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + db = file(params.test_data['sarscov2']['genome']['kraken2_bracken_tar_gz'], checkIfExists: true) + + ch_db = UNTAR ( [[:], db] ).untar + .map { it[1] } + KRAKEN2_KRAKEN2 ( input, ch_db ) + BRACKEN_BRACKEN ( KRAKEN2_KRAKEN2.out.txt, ch_db ) +} + +workflow test_bracken_bracken_single_end_custom_args { + input = [ [ id:'test', single_end:true, threshold:0, taxonomic_level:'G', read_length:100 ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + db = file(params.test_data['sarscov2']['genome']['kraken2_bracken_tar_gz'], checkIfExists: true) + + ch_db = UNTAR ( [[:], db] ).untar + .map { it[1] } + KRAKEN2_KRAKEN2 ( input, ch_db ) + BRACKEN_BRACKEN ( KRAKEN2_KRAKEN2.out.txt, ch_db ) +} + +workflow test_bracken_bracken_paired_end_default_args { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + db = 
file(params.test_data['sarscov2']['genome']['kraken2_bracken_tar_gz'], checkIfExists: true) + + ch_db = UNTAR ( [[:], db] ).untar + .map { it[1] } + KRAKEN2_KRAKEN2 ( input, ch_db ) + BRACKEN_BRACKEN ( KRAKEN2_KRAKEN2.out.txt, ch_db ) +} + +workflow test_bracken_bracken_paired_end_custom_args { + input = [ [ id:'test', single_end:false, threshold:0, taxonomic_level:'G', read_length:100 ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + db = file(params.test_data['sarscov2']['genome']['kraken2_bracken_tar_gz'], checkIfExists: true) + + ch_db = UNTAR ( [[:], db] ).untar + .map { it[1] } + KRAKEN2_KRAKEN2 ( input, ch_db ) + BRACKEN_BRACKEN ( KRAKEN2_KRAKEN2.out.txt, ch_db ) +} diff --git a/tests/modules/bracken/bracken/nextflow.config b/tests/modules/bracken/bracken/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/bracken/bracken/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/bracken/bracken/test.yml b/tests/modules/bracken/bracken/test.yml new file mode 100644 index 00000000..4c87cda7 --- /dev/null +++ b/tests/modules/bracken/bracken/test.yml @@ -0,0 +1,43 @@ +- name: bracken bracken test_bracken_bracken_single_end_default_args + command: nextflow run tests/modules/bracken/bracken -entry test_bracken_bracken_single_end_default_args -c tests/config/nextflow.config + tags: + - bracken/bracken + - bracken + files: + - path: output/bracken/test_S.tsv + md5sum: 4a21ae14ff8a0311d55f139af5247838 + - path: output/bracken/versions.yml + md5sum: ab8b1550f84a99bae80f050fe718abd0 + +- name: bracken bracken test_bracken_bracken_single_end_custom_args + command: nextflow run tests/modules/bracken/bracken -entry test_bracken_bracken_single_end_custom_args -c tests/config/nextflow.config + tags: + - bracken/bracken + - bracken + files: + - path: output/bracken/test_G.tsv + md5sum: f609b09d6edb5ebc1ea1435d1dd46cde + - path: output/bracken/versions.yml + md5sum: af87e8d4c42fbcb0469ab13912b8a9bd + +- name: bracken bracken test_bracken_bracken_paired_end_default_args + command: nextflow run tests/modules/bracken/bracken -entry test_bracken_bracken_paired_end_default_args -c tests/config/nextflow.config + tags: + - bracken/bracken + - bracken + files: + - path: output/bracken/test_S.tsv + md5sum: 4a21ae14ff8a0311d55f139af5247838 + - path: output/bracken/versions.yml + md5sum: 4602111eb25bd19a7f9d725acc5921f6 + +- name: bracken bracken test_bracken_bracken_paired_end_custom_args + command: nextflow run tests/modules/bracken/bracken -entry test_bracken_bracken_paired_end_custom_args -c tests/config/nextflow.config + tags: + - bracken/bracken + - bracken + files: + - path: output/bracken/test_G.tsv + md5sum: f609b09d6edb5ebc1ea1435d1dd46cde + - path: output/bracken/versions.yml + md5sum: d4618b01df5ac09cc366fe2ae7c13f06 From 1dddf1ce9443e3d93853d86e7a7aab52e5b4d614 Mon Sep 17 00:00:00 2001 From: Annick Renevey <47788523+rannick@users.noreply.github.com> Date: Wed, 23 Mar 2022 13:13:58 +0100 Subject: [PATCH 054/283] update star/align to include an optional sam output (support for --chimOutType SeparateSAMold option) (#1378) Co-authored-by: Praveen Raj S <43108054+praveenraj2018@users.noreply.github.com> --- modules/star/align/main.nf | 1 + 1 file changed, 1 
insertion(+) diff --git a/modules/star/align/main.nf b/modules/star/align/main.nf index 7c965d07..762b84f6 100644 --- a/modules/star/align/main.nf +++ b/modules/star/align/main.nf @@ -29,6 +29,7 @@ process STAR_ALIGN { tuple val(meta), path('*fastq.gz') , optional:true, emit: fastq tuple val(meta), path('*.tab') , optional:true, emit: tab tuple val(meta), path('*.out.junction') , optional:true, emit: junction + tuple val(meta), path('*.out.sam') , optional:true, emit: sam when: task.ext.when == null || task.ext.when From debbcd98eb0aaaddd6a95fb21b8585aac28e07a6 Mon Sep 17 00:00:00 2001 From: Julianus Pfeuffer Date: Thu, 24 Mar 2022 14:51:56 +0100 Subject: [PATCH 055/283] [FIX/DOC] Broken Adding modules link (#1433) * [FIX/DOC] Broken Adding modules link * Apply suggestion Co-authored-by: Edmund Miller Co-authored-by: Edmund Miller --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 478fe8da..1d8312cc 100644 --- a/README.md +++ b/README.md @@ -133,7 +133,7 @@ We have written a helper command in the `nf-core/tools` package that uses the Gi ## Adding new modules -If you wish to contribute a new module, please see the documentation on the [nf-core website](https://nf-co.re/developers/adding_modules). +If you wish to contribute a new module, please see the documentation on the [nf-core website](https://nf-co.re/developers/modules#writing-a-new-module-reference). > Please be kind to our code reviewers and submit one pull request per module :) From dfbb0bc17cb6799892e1d4d2de2f4e08abae92b6 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Thu, 24 Mar 2022 10:49:17 -0600 Subject: [PATCH 056/283] update bakta to latest version (v1.4.0) (#1428) * update bakta to latest version (v1.4.0) * Update test.yml * remove empty file md5sums * bakta was rebuilt to fix dependencies on bioconda * trigger ci Co-authored-by: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> --- modules/bakta/main.nf | 6 +++--- modules/bakta/meta.yml | 2 +- tests/modules/bakta/test.yml | 11 ----------- 3 files changed, 4 insertions(+), 15 deletions(-) diff --git a/modules/bakta/main.nf b/modules/bakta/main.nf index a7f971ef..70b2ecdb 100644 --- a/modules/bakta/main.nf +++ b/modules/bakta/main.nf @@ -2,10 +2,10 @@ process BAKTA { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bakta=1.3.1" : null) + conda (params.enable_conda ? "bioconda::bakta=1.4.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/bakta:1.3.1--pyhdfd78af_0' : - 'quay.io/biocontainers/bakta:1.3.1--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/bakta:1.4.0--pyhdfd78af_1' : + 'quay.io/biocontainers/bakta:1.4.0--pyhdfd78af_1' }" input: tuple val(meta), path(fasta) diff --git a/modules/bakta/meta.yml b/modules/bakta/meta.yml index 2514a996..63f55ff3 100644 --- a/modules/bakta/meta.yml +++ b/modules/bakta/meta.yml @@ -78,7 +78,7 @@ output: pattern: "*.ffn" - hypotheticals_tsv: type: file - description: further information on hypothetical protein CDS as simple human readble tab separated values + description: additional information on hypothetical protein CDS as simple human readble tab separated values pattern: "*.hypotheticals.tsv" - hypotheticals_faa: type: file diff --git a/tests/modules/bakta/test.yml b/tests/modules/bakta/test.yml index dcfc32bc..1eb3d8a5 100644 --- a/tests/modules/bakta/test.yml +++ b/tests/modules/bakta/test.yml @@ -1,25 +1,14 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml bakta - name: bakta command: nextflow run ./tests/modules/bakta -entry test_bakta -c tests/config/nextflow.config -stub-run tags: - bakta files: - path: output/bakta/test.embl - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.faa - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.ffn - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.fna - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.gbff - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.gff3 - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.hypotheticals.tsv - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.hypotheticals.faa - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/bakta/test.tsv - md5sum: d41d8cd98f00b204e9800998ecf8427e From 608bb2259851beee55f997b3b7503cee674744e2 Mon Sep 17 00:00:00 2001 From: "Robert A. Petit III" Date: Thu, 24 Mar 2022 12:56:32 -0600 Subject: [PATCH 057/283] Update dragonflye to latest version (#1442) * Update dragonflye to latest version * trigger ci --- modules/dragonflye/main.nf | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/dragonflye/main.nf b/modules/dragonflye/main.nf index 14597143..91b009d4 100644 --- a/modules/dragonflye/main.nf +++ b/modules/dragonflye/main.nf @@ -2,10 +2,10 @@ process DRAGONFLYE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::dragonflye=1.0.4" : null) + conda (params.enable_conda ? "bioconda::dragonflye=1.0.11" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/dragonflye:1.0.4--hdfd78af_0' : - 'quay.io/biocontainers/dragonflye:1.0.4--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dragonflye:1.0.11--hdfd78af_0' : + 'quay.io/biocontainers/dragonflye:1.0.11--hdfd78af_0' }" input: tuple val(meta), path(reads) @@ -32,6 +32,7 @@ process DRAGONFLYE { --ram $memory \\ --outdir ./ \\ --force + cat <<-END_VERSIONS > versions.yml "${task.process}": dragonflye: \$(dragonflye --version 2>&1 | sed 's/^.*dragonflye //' ) From fe088745e03f21d07477cc0c655c24ae9f72ac31 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 24 Mar 2022 14:44:31 -0500 Subject: [PATCH 058/283] style(deeptools): Indent script (#1439) --- modules/deeptools/bamcoverage/main.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/deeptools/bamcoverage/main.nf b/modules/deeptools/bamcoverage/main.nf index 83e3ffeb..926bf0ad 100644 --- a/modules/deeptools/bamcoverage/main.nf +++ b/modules/deeptools/bamcoverage/main.nf @@ -23,11 +23,11 @@ process DEEPTOOLS_BAMCOVERAGE { def prefix = task.ext.prefix ?: "${meta.id}.bigWig" """ - bamCoverage \ - --bam $input \ - $args \ - --numberOfProcessors ${task.cpus} \ - --outFileName ${prefix} + bamCoverage \\ + --bam $input \\ + $args \\ + --numberOfProcessors ${task.cpus} \\ + --outFileName ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": From 6806cd1de9f493ecba86960fc8a6e7d86e262d9b Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 24 Mar 2022 18:38:24 -0500 Subject: [PATCH 059/283] feat(bbmap): Initialize pileup module (#1441) * feat(bbmap): Initialize pileup module * test(bbmap): Update outputs * test(bbmap): Add pileup tags * style(bbmap): Add in when Co-authored-by: Robert A. Petit III --- modules/bbmap/pileup/main.nf | 39 ++++++++++++++++++ modules/bbmap/pileup/meta.yml | 47 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bbmap/pileup/main.nf | 15 +++++++ tests/modules/bbmap/pileup/nextflow.config | 5 +++ tests/modules/bbmap/pileup/test.yml | 12 ++++++ 6 files changed, 122 insertions(+) create mode 100644 modules/bbmap/pileup/main.nf create mode 100644 modules/bbmap/pileup/meta.yml create mode 100644 tests/modules/bbmap/pileup/main.nf create mode 100644 tests/modules/bbmap/pileup/nextflow.config create mode 100644 tests/modules/bbmap/pileup/test.yml diff --git a/modules/bbmap/pileup/main.nf b/modules/bbmap/pileup/main.nf new file mode 100644 index 00000000..8d424bc2 --- /dev/null +++ b/modules/bbmap/pileup/main.nf @@ -0,0 +1,39 @@ +process BBMAP_PILEUP { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.stats.txt"), emit: covstats + tuple val(meta), path("*.hist.txt") , emit: hist + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + pileup.sh \\ + -Xmx${task.memory.toGiga()}g \\ + in=${bam} \\ + out=${prefix}.coverage.stats.txt \\ + hist=${prefix}.coverage.hist.txt \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bbmap: \$(bbversion.sh) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ +} diff --git a/modules/bbmap/pileup/meta.yml b/modules/bbmap/pileup/meta.yml new file mode 100644 index 00000000..5cd85f9a --- /dev/null +++ b/modules/bbmap/pileup/meta.yml @@ -0,0 +1,47 @@ +name: "bbmap_pileup" +description: Calculates per-scaffold or per-base coverage information from an unsorted sam or bam file. +keywords: + - fasta + - genome + - coverage +tools: + - bbmap: + description: BBMap is a short read aligner, as well as various other bioinformatic tools. + homepage: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + documentation: https://jgi.doe.gov/data-and-tools/bbtools/bb-tools-user-guide/ + tool_dev_url: "https://github.com/BioInfoTools/BBMap/blob/master/sh/pileup.sh" + doi: "" + licence: ["UC-LBL license (see package)"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - stats: + type: file + description: Per-scaffold coverage info + pattern: "*.stats.txt" + - hist: + type: file + description: "Histogram of # occurrences of each depth level" + pattern: "*.hist.txt" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@Emiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d3282321..2a93a846 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -106,6 +106,10 @@ bbmap/index: - modules/bbmap/index/** - tests/modules/bbmap/index/** +bbmap/pileup: + - modules/bbmap/pileup/** + - tests/modules/bbmap/pileup/** + bcftools/annotate: - modules/bcftools/annotate/** - tests/modules/bcftools/annotate/** diff --git a/tests/modules/bbmap/pileup/main.nf b/tests/modules/bbmap/pileup/main.nf new file mode 100644 index 00000000..943df9b5 --- /dev/null +++ b/tests/modules/bbmap/pileup/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BBMAP_PILEUP } from '../../../../modules/bbmap/pileup/main.nf' + +workflow test_bbmap_pileup { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + BBMAP_PILEUP ( input ) +} diff --git a/tests/modules/bbmap/pileup/nextflow.config b/tests/modules/bbmap/pileup/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/bbmap/pileup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/bbmap/pileup/test.yml b/tests/modules/bbmap/pileup/test.yml new file mode 100644 index 00000000..84814a7a --- /dev/null +++ b/tests/modules/bbmap/pileup/test.yml @@ -0,0 +1,12 @@ +- name: "bbmap pileup" + command: nextflow run ./tests/modules/bbmap/pileup -entry test_bbmap_pileup -c ./tests/config/nextflow.config -c ./tests/modules/bbmap/pileup/nextflow.config + tags: + - "bbmap" + - "bbmap/pileup" + files: + - path: "output/bbmap/test.coverage.stats.txt" + md5sum: c3fc9d0681589b69e3301ca3cb27b7a4 + - path: "output/bbmap/test.coverage.hist.txt" + md5sum: 96915920ef42ddc9483457dd4585a088 + - path: output/bbmap/versions.yml + md5sum: 894acc38bdc167dc22851df15e5a8453 From d16f1ea4d75a179bb672c8e8aec5948d3ee5f0bd Mon Sep 17 00:00:00 2001 From: "Robert A. 
Petit III" Date: Thu, 24 Mar 2022 20:11:04 -0600 Subject: [PATCH 060/283] add module for panaroo, fix pirate tests (#1444) * add module for panaroo * update pirate test yaml * update panaroo test yaml * update pirate test yaml md5s * Update main.nf * Update main.nf * pin bioperl version --- modules/panaroo/run/main.nf | 36 +++++++++++++ modules/panaroo/run/meta.yml | 47 +++++++++++++++++ modules/pirate/main.nf | 6 +-- tests/config/pytest_modules.yml | 4 ++ tests/modules/panaroo/run/main.nf | 19 +++++++ tests/modules/panaroo/run/nextflow.config | 8 +++ tests/modules/panaroo/run/test.yml | 38 ++++++++++++++ tests/modules/pirate/main.nf | 4 -- tests/modules/pirate/test.yml | 62 +++++++++++------------ 9 files changed, 186 insertions(+), 38 deletions(-) create mode 100644 modules/panaroo/run/main.nf create mode 100644 modules/panaroo/run/meta.yml create mode 100644 tests/modules/panaroo/run/main.nf create mode 100644 tests/modules/panaroo/run/nextflow.config create mode 100644 tests/modules/panaroo/run/test.yml diff --git a/modules/panaroo/run/main.nf b/modules/panaroo/run/main.nf new file mode 100644 index 00000000..d8be0c1e --- /dev/null +++ b/modules/panaroo/run/main.nf @@ -0,0 +1,36 @@ +process PANAROO_RUN { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::panaroo=1.2.9" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/panaroo:1.2.9--pyhdfd78af_0': + 'quay.io/biocontainers/panaroo:1.2.9--pyhdfd78af_0' }" + + input: + tuple val(meta), path(gff) + + output: + tuple val(meta), path("results/*") , emit: results + tuple val(meta), path("results/core_gene_alignment.aln"), optional: true, emit: aln + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + panaroo \\ + $args \\ + -t $task.cpus \\ + -o results \\ + -i $gff + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + panaroo: \$(echo \$(panaroo --version 2>&1) | sed 's/^.*panaroo //' )) + END_VERSIONS + """ +} diff --git a/modules/panaroo/run/meta.yml b/modules/panaroo/run/meta.yml new file mode 100644 index 00000000..eaeb41d0 --- /dev/null +++ b/modules/panaroo/run/meta.yml @@ -0,0 +1,47 @@ +name: "panaroo_run" +description: A fast and scalable tool for bacterial pangenome analysis +keywords: + - gff + - pan-genome + - alignment +tools: + - "panaroo": + description: "panaroo - an updated pipeline for pangenome investigation" + homepage: "https://gtonkinhill.github.io/panaroo/#/" + documentation: "https://gtonkinhill.github.io/panaroo/#/gettingstarted/quickstart" + tool_dev_url: "https://github.com/gtonkinhill/panaroo" + doi: "10.1186/s13059-020-02090-4" + licence: "['MIT']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - gff: + type: file + description: A set of GFF3 formatted files + pattern: "*.{gff}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: directory + description: Directory containing Panaroo result files + pattern: "*/*" + - aln: + type: file + description: Core-genome alignment produced by Panaroo (Optional) + pattern: "*.{fasta}" + +authors: + - "@rpetit3" diff --git a/modules/pirate/main.nf b/modules/pirate/main.nf index 20544a1b..ebef5b37 100644 --- a/modules/pirate/main.nf +++ b/modules/pirate/main.nf @@ -2,10 +2,10 @@ process PIRATE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::pirate=1.0.4" : null) + conda (params.enable_conda ? "bioconda::pirate=1.0.4 bioconda::perl-bioperl=1.7.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/pirate:1.0.4--hdfd78af_1' : - 'quay.io/biocontainers/pirate:1.0.4--hdfd78af_1' }" + 'https://depot.galaxyproject.org/singularity/pirate:1.0.4--hdfd78af_2' : + 'quay.io/biocontainers/pirate:1.0.4--hdfd78af_2' }" input: tuple val(meta), path(gff) diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2a93a846..13bcbbd3 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1248,6 +1248,10 @@ pairtools/sort: - modules/pairtools/sort/** - tests/modules/pairtools/sort/** +panaroo/run: + - modules/panaroo/run/** + - tests/modules/panaroo/run/** + pangolin: - modules/pangolin/** - tests/modules/pangolin/** diff --git a/tests/modules/panaroo/run/main.nf b/tests/modules/panaroo/run/main.nf new file mode 100644 index 00000000..1fab62ed --- /dev/null +++ b/tests/modules/panaroo/run/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PANAROO_RUN } from '../../../../modules/panaroo/run/main.nf' + +workflow test_panaroo_run { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test1_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test2_gff'], checkIfExists: true), + file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) + ] + ] + + PANAROO_RUN ( input ) +} diff --git a/tests/modules/panaroo/run/nextflow.config b/tests/modules/panaroo/run/nextflow.config new file mode 100644 index 00000000..af23e716 --- /dev/null +++ b/tests/modules/panaroo/run/nextflow.config @@ -0,0 +1,8 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PANAROO_RUN { + ext.args = '--clean-mode strict -a core' + } +} diff --git a/tests/modules/panaroo/run/test.yml b/tests/modules/panaroo/run/test.yml new file mode 100644 index 00000000..20f0b814 --- /dev/null +++ b/tests/modules/panaroo/run/test.yml @@ -0,0 +1,38 @@ +- name: panaroo run test_panaroo_run + command: nextflow run tests/modules/panaroo/run -entry test_panaroo_run -c tests/config/nextflow.config + tags: + - panaroo/run + - panaroo + files: + - path: output/panaroo/results/combined_DNA_CDS.fasta + md5sum: 92bd4f6b9c84c84c0989d77d2558b7d1 + - path: output/panaroo/results/combined_protein_CDS.fasta + md5sum: 7a5636c4f2abc190f5dd4dfbb1da7034 + - path: output/panaroo/results/combined_protein_cdhit_out.txt + md5sum: 2164b81606712b53af2198b4d50eed08 + - path: output/panaroo/results/combined_protein_cdhit_out.txt.clstr + 
md5sum: aaa9fc2b797c932b6d9ff2df640113f1 + - path: output/panaroo/results/core_alignment_header.embl + contains: ["DNA", "feature", "locus_tag"] + - path: output/panaroo/results/core_gene_alignment.aln + contains: ["test1", "test2", "test3"] + - path: output/panaroo/results/final_graph.gml + contains: ["graph", "isolateNames", "test3"] + - path: output/panaroo/results/gene_data.csv + md5sum: 45df2d26ac81754b858c0e4757e834b2 + - path: output/panaroo/results/gene_presence_absence.Rtab + md5sum: 9efbed5df121dc0c4fbb58869b65c3e4 + - path: output/panaroo/results/gene_presence_absence.csv + contains: ["Gene", "unique", "test3"] + - path: output/panaroo/results/gene_presence_absence_roary.csv + contains: ["Gene", "Max", "Fragment"] + - path: output/panaroo/results/pan_genome_reference.fa + md5sum: d0b03cdfaab8965d86cb1045db3c2d20 + - path: output/panaroo/results/pre_filt_graph.gml + contains: ["graph", "node", "id"] + - path: output/panaroo/results/struct_presence_absence.Rtab + md5sum: 246017db1175f9b1683dfe60cb2e9b5b + - path: output/panaroo/results/summary_statistics.txt + md5sum: 9c73c416e1758bea06c67b4ec9fd0720 + - path: output/panaroo/versions.yml + md5sum: 909e7b046a8a87319986d34b7328641c diff --git a/tests/modules/pirate/main.nf b/tests/modules/pirate/main.nf index 05e5bdd8..45c60713 100644 --- a/tests/modules/pirate/main.nf +++ b/tests/modules/pirate/main.nf @@ -14,10 +14,6 @@ workflow test_pirate { file(params.test_data['candidatus_portiera_aleyrodidarum']['genome']['test3_gff'], checkIfExists: true) ] ] - // [ file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000292685.gff", checkIfExists: true), - // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_000298385.gff", checkIfExists: true), - // file("https://github.com/bactopia/bactopia-tests/raw/main/data/reference/gff/GCF_002849995.gff", checkIfExists: true) ] - // ] PIRATE ( input ) } diff --git a/tests/modules/pirate/test.yml b/tests/modules/pirate/test.yml index 6ef9e6e0..ffee5552 100644 --- a/tests/modules/pirate/test.yml +++ b/tests/modules/pirate/test.yml @@ -16,35 +16,35 @@ - path: output/pirate/results/PIRATE.unique_alleles.tsv contains: ["allele_name"] - path: output/pirate/results/binary_presence_absence.fasta - contains: ["GCF_000292685"] + contains: ["test1"] - path: output/pirate/results/binary_presence_absence.nwk - md5sum: 5b5d86bf97d97de37bb9db514abb7762 + md5sum: 1fcf7434911bbab39b74791259f1f989 - path: output/pirate/results/cluster_alleles.tab contains: ["g0001"] - - path: output/pirate/results/co-ords/GCF_000292685.co-ords.tab - md5sum: d5ca0f06ca7ea1f5486683d5859bc9b8 - - path: output/pirate/results/co-ords/GCF_000298385.co-ords.tab - md5sum: a24d6048b3074242bb558c7fa27a8b03 - - path: output/pirate/results/co-ords/GCF_002849995.co-ords.tab - md5sum: 0c08228585f4fa95686e9b025e0fe9c1 + - path: output/pirate/results/co-ords/test1.co-ords.tab + md5sum: f1e75c045b35bae260dadb1a2f000dfa + - path: output/pirate/results/co-ords/test2.co-ords.tab + md5sum: 953b0d2f1dfd4c3a6a6dd246c9174703 + - path: output/pirate/results/co-ords/test3.co-ords.tab + md5sum: 61f2c52ef77dc9a97a200c57b89b7d69 - path: output/pirate/results/genome2loci.tab - md5sum: bbcea5bfcdcafe14a9aa7261c8e931b8 + md5sum: 2d9cdefd5b1a7f5e20b0a70a6e5fa588 - path: output/pirate/results/genome_list.txt - md5sum: 6534b1635c258ad92b829077addc1ff5 + md5sum: c19ac63da7949b15179f42093cbf95b8 - path: output/pirate/results/link_clusters.log contains: ["parsing paralog file"] - path: 
output/pirate/results/loci_list.tab - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/loci_paralog_categories.tab - md5sum: 6404d2a32526a398f42d7da768a389bd - - path: output/pirate/results/modified_gffs/GCF_000292685.gff - md5sum: 2b73bda2f84dc634303dc90e641040ca - - path: output/pirate/results/modified_gffs/GCF_000298385.gff - md5sum: b1a9d6557d47e09249f08a7acdbbd618 - - path: output/pirate/results/modified_gffs/GCF_002849995.gff - md5sum: 68532fc9bb639e6d83c731a069f60cf8 + md5sum: f8c3d31f445c3cb66bd95ba77be0a7b8 + - path: output/pirate/results/modified_gffs/test1.gff + md5sum: 5dc220e472165c3b693e1017f38fc0e3 + - path: output/pirate/results/modified_gffs/test2.gff + md5sum: 975f5c7fb363caf1be9c53e5e237bb5d + - path: output/pirate/results/modified_gffs/test3.gff + md5sum: a4735a5ac75ce82aadba8a3de6cabc2e - path: output/pirate/results/pan_sequences.fasta - md5sum: ed835c77fdb20c36aa9d5208eb7ca0cb + md5sum: 0492833706f0f969b49a3555c16ec40d - path: output/pirate/results/pangenome.connected_blocks.tsv contains: ["block_number"] - path: output/pirate/results/pangenome.edges @@ -59,29 +59,29 @@ contains: ["g0091"] - path: output/pirate/results/pangenome.temp - path: output/pirate/results/pangenome_iterations/pan_sequences.50.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.60.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.70.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.80.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.90.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.95.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.98.reclustered.reinflated - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.blast.output - md5sum: 9da25d27684bfcc5488987ab2d1fd3a1 + md5sum: ffb3b5bf1ba24bf2bd0be677500139a2 - path: output/pirate/results/pangenome_iterations/pan_sequences.cdhit_clusters - contains: ["GCF_000298385_00081"] + contains: ["test1_00081"] - path: output/pirate/results/pangenome_iterations/pan_sequences.core_clusters.tab - contains: ["GCF_000298385_00242"] + contains: ["test1_00242"] - path: output/pirate/results/pangenome_iterations/pan_sequences.mcl_log.txt contains: ["chaos"] - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta - md5sum: 84668b6c65b57026a17a50b0edd02541 + md5sum: 6e483c773ed90bd50b33f2bd569343e4 - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.pdb - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.pot - path: output/pirate/results/pangenome_iterations/pan_sequences.representative.fasta.ptf @@ -95,4 +95,4 @@ - path: output/pirate/results/representative_sequences.ffn contains: ["representative_genome"] - path: output/pirate/results/split_groups.log - contains: ["g0213"] + contains: ["g0"] From e3a5795652ae9838111b33483a80ad733e7501c4 Mon Sep 17 
00:00:00 2001 From: Edmund Miller Date: Thu, 24 Mar 2022 21:44:12 -0500 Subject: [PATCH 061/283] Homer/findpeaks improvements (#1436) * fix(homer): Update expected file path to improve caching * docs(homer): Update findpeaks * test(homer): Add maketagdirectory dependency Co-authored-by: Robert A. Petit III --- modules/homer/findpeaks/main.nf | 4 ++-- modules/homer/findpeaks/meta.yml | 6 ++++-- tests/config/pytest_modules.yml | 1 + 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/modules/homer/findpeaks/main.nf b/modules/homer/findpeaks/main.nf index b9947774..4858d6da 100644 --- a/modules/homer/findpeaks/main.nf +++ b/modules/homer/findpeaks/main.nf @@ -13,8 +13,8 @@ process HOMER_FINDPEAKS { tuple val(meta), path(tagDir) output: - tuple val(meta), path("*peaks.txt"), emit: txt - path "versions.yml" , emit: versions + tuple val(meta), path("*.peaks.txt"), emit: txt + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/modules/homer/findpeaks/meta.yml b/modules/homer/findpeaks/meta.yml index e7cef0cd..72eb8d0c 100644 --- a/modules/homer/findpeaks/meta.yml +++ b/modules/homer/findpeaks/meta.yml @@ -7,7 +7,9 @@ tools: - homer: description: | HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. - documentation: http://homer.ucsd.edu/homer/ + homepage: "http://homer.ucsd.edu/homer/index.html" + documentation: "http://homer.ucsd.edu/homer/" + tool_dev_url: "http://homer.ucsd.edu/homer/ngs/peaks.html" doi: 10.1016/j.molcel.2010.05.004. licence: ["GPL-3.0-or-later"] input: @@ -29,7 +31,7 @@ output: - peaks: type: file description: The found peaks - pattern: "*peaks.txt" + pattern: "*.peaks.txt" - versions: type: file description: File containing software versions diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 13bcbbd3..a211acbc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -876,6 +876,7 @@ homer/annotatepeaks: homer/findpeaks: - modules/homer/findpeaks/** + - modules/homer/maketagdirectory/** - tests/modules/homer/findpeaks/** homer/maketagdirectory: From b5825fe6b336352024aebccd274da1d131188bfc Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 25 Mar 2022 07:46:51 +0100 Subject: [PATCH 062/283] Mpileup also likes intervals (#1445) * Mpileup also likes intervals * Also update meta yml with inclusive input and intervals Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Maxime U. Garcia --- modules/samtools/mpileup/main.nf | 5 +++-- modules/samtools/mpileup/meta.yml | 6 +++++- tests/modules/samtools/mpileup/main.nf | 13 ++++++++++++- tests/modules/samtools/mpileup/test.yml | 19 ++++++++++++++++--- 4 files changed, 36 insertions(+), 7 deletions(-) diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index cea40321..474a2492 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -8,7 +8,7 @@ process SAMTOOLS_MPILEUP { 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(input), path(intervals) path fasta output: @@ -21,12 +21,13 @@ process SAMTOOLS_MPILEUP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def intervals = intervals ? 
"-l ${intervals}" : "" """ samtools mpileup \\ --fasta-ref $fasta \\ --output ${prefix}.mpileup \\ $args \\ - $bam + $input cat <<-END_VERSIONS > versions.yml "${task.process}": samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') diff --git a/modules/samtools/mpileup/meta.yml b/modules/samtools/mpileup/meta.yml index c384f5c6..ae499e92 100644 --- a/modules/samtools/mpileup/meta.yml +++ b/modules/samtools/mpileup/meta.yml @@ -21,7 +21,7 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - bam: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" @@ -29,6 +29,10 @@ input: type: file description: FASTA reference file pattern: "*.{fasta,fa}" + - intervals: + type: file + description: Interval FILE + pattern: "*.bed" output: - meta: type: map diff --git a/tests/modules/samtools/mpileup/main.nf b/tests/modules/samtools/mpileup/main.nf index dc58cc2c..f7fd4b43 100644 --- a/tests/modules/samtools/mpileup/main.nf +++ b/tests/modules/samtools/mpileup/main.nf @@ -6,7 +6,18 @@ include { SAMTOOLS_MPILEUP } from '../../../../modules/samtools/mpileup/main.nf' workflow test_samtools_mpileup { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + SAMTOOLS_MPILEUP ( input, fasta ) +} + +workflow test_samtools_mpileup_intervals { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 53a9c142..405263d1 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -1,8 +1,21 @@ -- name: samtools mpileup - command: nextflow run ./tests/modules/samtools/mpileup -entry test_samtools_mpileup -c ./tests/config/nextflow.config -c ./tests/modules/samtools/mpileup/nextflow.config +- name: samtools mpileup test_samtools_mpileup + command: nextflow run tests/modules/samtools/mpileup -entry test_samtools_mpileup -c tests/config/nextflow.config tags: - samtools - samtools/mpileup files: - - path: ./output/samtools/test.mpileup + - path: output/samtools/test.mpileup md5sum: 958e6bead4103d72026f80153b6b5150 + - path: output/samtools/versions.yml + md5sum: 26350e1e145451f0b807911db029861e + +- name: samtools mpileup test_samtools_mpileup_intervals + command: nextflow run tests/modules/samtools/mpileup -entry test_samtools_mpileup_intervals -c tests/config/nextflow.config + tags: + - samtools + - samtools/mpileup + files: + - path: output/samtools/test.mpileup + md5sum: 958e6bead4103d72026f80153b6b5150 + - path: output/samtools/versions.yml + md5sum: 11d8118a558efb9db6798453862d719c From e786457fb0da9653659b921fc47985554f28273c Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Fri, 25 Mar 2022 11:26:47 +0100 Subject: [PATCH 063/283] feat: add readgroup info + reorder samtools command line to mimic bwamem (#1447) --- modules/dragmap/align/main.nf | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf index 9f261cc2..ee94a9a8 100644 --- a/modules/dragmap/align/main.nf +++ b/modules/dragmap/align/main.nf @@ -24,16 +24,18 @@ process DRAGMAP_ALIGN { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def read_group = meta.read_group ? "--RGSM ${meta.read_group}" : "" def samtools_command = sort_bam ? 'sort' : 'view' if (meta.single_end) { """ dragen-os \\ -r $hashmap \\ - -1 $reads \\ - --num-threads $task.cpus \\ $args \\ + $read_group \\ + --num-threads $task.cpus \\ + -1 $reads \\ 2> ${prefix}.dragmap.log \\ - | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -46,12 +48,13 @@ process DRAGMAP_ALIGN { """ dragen-os \\ -r $hashmap \\ + $args \\ + $read_group \\ + --num-threads $task.cpus \\ -1 ${reads[0]} \\ -2 ${reads[1]} \\ - --num-threads $task.cpus \\ - $args \\ 2> ${prefix}.dragmap.log \\ - | samtools $samtools_command -@ $task.cpus $args2 -o ${prefix}.bam - + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - cat <<-END_VERSIONS > versions.yml "${task.process}": From 3d31fa4d04177579e86044bf111588376e1a0c12 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 25 Mar 2022 14:01:57 +0100 Subject: [PATCH 064/283] CAT: Use meta map and supply output file name via modules.config (#1446) * Use meta map and supply output file name via modules.config * Remove all def declarations to make it work * update tests & remove extra . * fix ze tests * update meta.yml with meta map info * add tag line now that meta is available --- modules/cat/cat/main.nf | 20 ++++++++------- modules/cat/cat/meta.yml | 9 ++++--- tests/modules/cat/cat/main.nf | 37 ++++++++++++++++----------- tests/modules/cat/cat/nextflow.config | 8 ++++++ tests/modules/cat/cat/test.yml | 4 +-- 5 files changed, 49 insertions(+), 29 deletions(-) diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 4ee44599..25dcc652 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -1,4 +1,5 @@ process CAT_CAT { + tag "$meta.id" label 'process_low' conda (params.enable_conda ? "conda-forge::pigz=2.3.4" : null) @@ -7,12 +8,11 @@ process CAT_CAT { 'quay.io/biocontainers/pigz:2.3.4' }" input: - path files_in - val file_out + tuple val(meta), path(files_in) output: - path "${file_out}*" , emit: file_out - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}"), emit: file_out + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -29,16 +29,18 @@ process CAT_CAT { // | gzipped | ungzipped | zcat | | // | ungzipped | gzipped | cat | pigz | - def in_zip = file_list[0].endsWith('.gz') - def out_zip = file_out.endsWith('.gz') - def command1 = (in_zip && !out_zip) ? 'zcat' : 'cat' - def command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $args2" : '' + // Use input file ending as default + prefix = task.ext.prefix ?: "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}" + out_zip = prefix.endsWith('.gz') + in_zip = file_list[0].endsWith('.gz') + command1 = (in_zip && !out_zip) ? 
'zcat' : 'cat' + command2 = (!in_zip && out_zip) ? "| pigz -c -p $task.cpus $args2" : '' """ $command1 \\ $args \\ ${file_list.join(' ')} \\ $command2 \\ - > $file_out + > ${prefix} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/cat/cat/meta.yml b/modules/cat/cat/meta.yml index e0a6361d..5eeff5a6 100644 --- a/modules/cat/cat/meta.yml +++ b/modules/cat/cat/meta.yml @@ -12,13 +12,15 @@ tools: tool_dev_url: None licence: ["GPL-3.0-or-later"] input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] - files_in: type: file description: List of compressed / uncompressed files pattern: "*" - - file_out: - type: value - description: Full name of output file with or without .gz extension output: - versions: @@ -32,3 +34,4 @@ output: authors: - "@erikrikarddaniel" + - "@FriederikeHanssen" diff --git a/tests/modules/cat/cat/main.nf b/tests/modules/cat/cat/main.nf index 1fd56851..24e1d00d 100644 --- a/tests/modules/cat/cat/main.nf +++ b/tests/modules/cat/cat/main.nf @@ -2,53 +2,60 @@ nextflow.enable.dsl = 2 -include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' +include { CAT_CAT } from '../../../../modules/cat/cat/main.nf' +include { CAT_CAT as CAT_UNZIPPED_ZIPPED } from '../../../../modules/cat/cat/main.nf' +include { CAT_CAT as CAT_ZIPPED_UNZIPPED } from '../../../../modules/cat/cat/main.nf' workflow test_cat_unzipped_unzipped { input = [ - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) + [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) ] ] - CAT_CAT ( input, 'cat.txt' ) + CAT_CAT ( input ) } workflow test_cat_zipped_zipped { input = [ - file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) + [ id:'test', single_end:true ], // meta map + [file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true)] ] - CAT_CAT ( input, 'cat.txt.gz' ) + CAT_CAT ( input ) } workflow test_cat_zipped_unzipped { input = [ - file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true) + [ id:'test', single_end:true ], // meta map + [file(params.test_data['sarscov2']['genome']['genome_gff3_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['contigs_genome_maf_gz'], checkIfExists: true)] ] - CAT_CAT ( input, 'cat.txt' ) + CAT_ZIPPED_UNZIPPED ( input ) } workflow test_cat_unzipped_zipped { input = [ - file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true) + [ id:'test', single_end:true ], // meta map + [file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['genome_sizes'], checkIfExists: true)] ] - CAT_CAT ( input, 'cat.txt.gz' ) + CAT_UNZIPPED_ZIPPED ( input ) } workflow test_cat_one_file_unzipped_zipped { input = [ - file(params.test_data['sarscov2']['genome']['genome_fasta'], 
checkIfExists: true), + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - CAT_CAT ( input, 'cat.txt.gz' ) + CAT_UNZIPPED_ZIPPED ( input ) } diff --git a/tests/modules/cat/cat/nextflow.config b/tests/modules/cat/cat/nextflow.config index 8730f1c4..b0077a9c 100644 --- a/tests/modules/cat/cat/nextflow.config +++ b/tests/modules/cat/cat/nextflow.config @@ -2,4 +2,12 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: CAT_UNZIPPED_ZIPPED { + ext.prefix = 'cat.txt.gz' + } + + withName: CAT_ZIPPED_UNZIPPED { + ext.prefix = 'cat.txt' + } } + diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index 1244d8d2..d6e6595e 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -4,7 +4,7 @@ - cat - cat/cat files: - - path: output/cat/cat.txt + - path: output/cat/test.fasta md5sum: f44b33a0e441ad58b2d3700270e2dbe2 - name: cat zipped zipped @@ -13,7 +13,7 @@ - cat - cat/cat files: - - path: output/cat/cat.txt.gz + - path: output/cat/test.gz - name: cat zipped unzipped command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config From 98ed71c8f6968bc689f170e2b79d0866e3f28cda Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 25 Mar 2022 08:29:36 -0500 Subject: [PATCH 065/283] feat(homer): Add pos2bed module (#1435) * feat(homer): Add pos2bed module * test(homer): Pass bed format * test(homer): Add upstream dependencies to avoid regressions * Update modules/homer/pos2bed/main.nf Co-authored-by: FriederikeHanssen --- modules/homer/pos2bed/main.nf | 33 ++++++++++++++++ modules/homer/pos2bed/meta.yml | 42 +++++++++++++++++++++ tests/config/pytest_modules.yml | 6 +++ tests/modules/homer/pos2bed/main.nf | 19 ++++++++++ tests/modules/homer/pos2bed/nextflow.config | 9 +++++ tests/modules/homer/pos2bed/test.yml | 10 +++++ 6 files changed, 119 insertions(+) create mode 100644 modules/homer/pos2bed/main.nf create mode 100644 modules/homer/pos2bed/meta.yml create mode 100644 tests/modules/homer/pos2bed/main.nf create mode 100644 tests/modules/homer/pos2bed/nextflow.config create mode 100644 tests/modules/homer/pos2bed/test.yml diff --git a/modules/homer/pos2bed/main.nf b/modules/homer/pos2bed/main.nf new file mode 100644 index 00000000..b85a30b5 --- /dev/null +++ b/modules/homer/pos2bed/main.nf @@ -0,0 +1,33 @@ +def VERSION = '4.11' // Version information not provided by tool on CLI + +process HOMER_POS2BED { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::homer=4.11" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : + 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" + + input: + tuple val(meta), path(peaks) + + output: + tuple val(meta), path("*.bed"), emit: bed + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + pos2bed.pl $peaks > ${prefix}.bed + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + homer: $VERSION + END_VERSIONS + """ +} diff --git a/modules/homer/pos2bed/meta.yml b/modules/homer/pos2bed/meta.yml new file mode 100644 index 00000000..fb75bb1f --- /dev/null +++ b/modules/homer/pos2bed/meta.yml @@ -0,0 +1,42 @@ +name: "homer_pos2bed" +description: Converting from HOMER peak to BED file formats +keywords: + - peaks +tools: + - "homer": + description: | + HOMER (Hypergeometric Optimization of Motif EnRichment) is a suite of tools for Motif Discovery and next-gen sequencing analysis. + homepage: "http://homer.ucsd.edu/homer/index.html" + documentation: "http://homer.ucsd.edu/homer/" + tool_dev_url: "http://homer.ucsd.edu/homer/ngs/miscellaneous.html" + doi: 10.1016/j.molcel.2010.05.004. + licence: ["GPL-3.0-or-later"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - peaks: + type: file + description: HOMER peaks file + pattern: "*.peaks.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bed: + type: file + description: BED file + pattern: "*.bed" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@Emiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a211acbc..17a122b7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -887,6 +887,12 @@ homer/makeucscfile: - modules/homer/makeucscfile/** - tests/modules/homer/makeucscfile/** +homer/pos2bed: + - modules/homer/pos2bed/** + - modules/homer/maketagdirectory/** + - modules/homer/findpeaks/** + - tests/modules/homer/pos2bed/** + hpsuissero: - modules/hpsuissero/** - tests/modules/hpsuissero/** diff --git a/tests/modules/homer/pos2bed/main.nf b/tests/modules/homer/pos2bed/main.nf new file mode 100644 index 00000000..bcfdc0bc --- /dev/null +++ b/tests/modules/homer/pos2bed/main.nf @@ -0,0 +1,19 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main.nf' +include { HOMER_POS2BED } from '../../../../modules/homer/pos2bed/main.nf' + +workflow test_homer_pos2bed { + input = [[id:'test'], + [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HOMER_MAKETAGDIRECTORY (input, fasta) + HOMER_FINDPEAKS ( HOMER_MAKETAGDIRECTORY.out.tagdir ) + + HOMER_POS2BED ( HOMER_FINDPEAKS.out.txt ) +} diff --git a/tests/modules/homer/pos2bed/nextflow.config b/tests/modules/homer/pos2bed/nextflow.config new file mode 100644 index 00000000..a4c88475 --- /dev/null +++ b/tests/modules/homer/pos2bed/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: HOMER_MAKETAGDIRECTORY { + ext.args = '-format bed' + } + +} diff --git a/tests/modules/homer/pos2bed/test.yml b/tests/modules/homer/pos2bed/test.yml new file mode 100644 index 00000000..525d61c8 --- /dev/null +++ b/tests/modules/homer/pos2bed/test.yml @@ -0,0 +1,10 @@ +- name: "homer pos2bed" + command: nextflow run ./tests/modules/homer/pos2bed -entry test_homer_pos2bed -c ./tests/config/nextflow.config -c ./tests/modules/homer/pos2bed/nextflow.config + tags: + - "homer" + - "homer/pos2bed" + files: + - path: "output/homer/test.bed" + md5sum: 0b9ebd8f06b9c820a551fbdb2d7635ee + - path: output/homer/versions.yml + md5sum: 1485f4b2d76484e8fe3310e2505de2fd From 1fb1801dcbbc556d04985bda5ba4329a4f50543f Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 25 Mar 2022 08:54:09 -0500 Subject: [PATCH 066/283] refactor(homer): Update output path for bedGraph (#1438) This allow for caching because otherwise it modifies the tag directory input --- modules/homer/makeucscfile/main.nf | 6 +++--- modules/homer/makeucscfile/meta.yml | 2 +- tests/modules/homer/makeucscfile/test.yml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/modules/homer/makeucscfile/main.nf b/modules/homer/makeucscfile/main.nf index c82435ea..379fad3d 100644 --- a/modules/homer/makeucscfile/main.nf +++ b/modules/homer/makeucscfile/main.nf @@ -13,8 +13,8 @@ process HOMER_MAKEUCSCFILE { tuple val(meta), path(tagDir) output: - tuple val(meta), path("tag_dir/*ucsc.bedGraph.gz"), emit: bedGraph - path "versions.yml" , emit: versions + tuple val(meta), path("*.bedGraph.gz"), emit: bedGraph + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -25,7 +25,7 @@ process HOMER_MAKEUCSCFILE { """ makeUCSCfile \\ $tagDir \\ - -o auto \\ + -o ${prefix}.bedGraph \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/homer/makeucscfile/meta.yml b/modules/homer/makeucscfile/meta.yml index 273f456e..039fd37f 100644 --- a/modules/homer/makeucscfile/meta.yml +++ b/modules/homer/makeucscfile/meta.yml @@ -30,7 +30,7 @@ output: - bedGraph: type: file description: The UCSC bed graph - pattern: "tag_dir/*ucsc.bedGraph.gz" + pattern: "*.bedGraph.gz" - versions: type: file description: File containing software versions diff --git a/tests/modules/homer/makeucscfile/test.yml b/tests/modules/homer/makeucscfile/test.yml index cf3d1b4d..e6ccb9fa 100644 --- a/tests/modules/homer/makeucscfile/test.yml +++ b/tests/modules/homer/makeucscfile/test.yml @@ -4,4 +4,4 @@ - homer - homer/makeucscfile files: - - path: output/homer/tag_dir/tag_dir.ucsc.bedGraph.gz + - path: output/homer/test.bedGraph.gz From a0664567353fae8fdcee1162ee3f015a91271062 Mon Sep 17 00:00:00 2001 From: Simon Pearce <24893913+SPPearce@users.noreply.github.com> Date: Fri, 25 Mar 2022 14:25:07 +0000 Subject: [PATCH 067/283] Add tests for umitools extract and dedup (#1429) * NGSCheckMate v1 * Add some tests for UMItools * Added tests for dedup * Include pytest * Delete main.nf * Delete meta.yml * Delete main.nf * Delete nextflow.config * Delete test.yml * add prettier * Add direct test on bam * Update tests/modules/umitools/dedup/main.nf Co-authored-by: Edmund Miller * Update tests/modules/umitools/dedup/main.nf Co-authored-by: Edmund Miller * Update tests/config/pytest_modules.yml Co-authored-by: Edmund Miller * Not ignore-umi Co-authored-by: Simon Pearce Co-authored-by: Maxime U. 
Garcia Co-authored-by: Edmund Miller --- tests/config/pytest_modules.yml | 9 +++ tests/modules/umitools/dedup/main.nf | 56 +++++++++++++++++++ tests/modules/umitools/dedup/nextflow.config | 17 ++++++ tests/modules/umitools/dedup/test.yml | 54 ++++++++++++++++++ tests/modules/umitools/extract/main.nf | 29 ++++++++++ .../modules/umitools/extract/nextflow.config | 9 +++ tests/modules/umitools/extract/test.yml | 27 +++++++++ 7 files changed, 201 insertions(+) create mode 100644 tests/modules/umitools/dedup/main.nf create mode 100644 tests/modules/umitools/dedup/nextflow.config create mode 100644 tests/modules/umitools/dedup/test.yml create mode 100644 tests/modules/umitools/extract/main.nf create mode 100644 tests/modules/umitools/extract/nextflow.config create mode 100644 tests/modules/umitools/extract/test.yml diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 17a122b7..78c3fe56 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1763,6 +1763,15 @@ ultra/pipeline: - modules/ultra/pipeline/** - tests/modules/ultra/pipeline/** +umitools/dedup: + - modules/umitools/dedup/** + - modules/umitools/extract/** + - tests/modules/umitools/dedup/** + +umitools/extract: + - modules/umitools/extract/** + - tests/modules/umitools/extract/** + unicycler: - modules/unicycler/** - tests/modules/unicycler/** diff --git a/tests/modules/umitools/dedup/main.nf b/tests/modules/umitools/dedup/main.nf new file mode 100644 index 00000000..f89ba935 --- /dev/null +++ b/tests/modules/umitools/dedup/main.nf @@ -0,0 +1,56 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UMITOOLS_EXTRACT } from '../../../../modules/umitools/extract/main.nf' +include { BWA_INDEX } from '../../../../modules/bwa/index/main.nf' +include { BWA_MEM } from '../../../../modules/bwa/mem/main.nf' +include { SAMTOOLS_INDEX } from '../../../../modules/samtools/index/main.nf' +include { UMITOOLS_DEDUP } from '../../../../modules/umitools/dedup/main.nf' + +// +// Test with no UMI +// +workflow test_umitools_dedup_no_umi { + input = [ [ id:'test'], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ], + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ] + ] + + UMITOOLS_DEDUP ( input ) +} + +// +// Test with single-end data +// +workflow test_umitools_dedup_single_end { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + UMITOOLS_EXTRACT ( input ) + BWA_INDEX ( fasta ) + BWA_MEM ( UMITOOLS_EXTRACT.out.reads, BWA_INDEX.out.index, true ) + SAMTOOLS_INDEX (BWA_MEM.out.bam) + UMITOOLS_DEDUP(BWA_MEM.out.bam.join(SAMTOOLS_INDEX.out.bai, by: [0])) +} + +// +// Test with paired-end data +// +workflow test_umitools_dedup_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + UMITOOLS_EXTRACT ( input ) + BWA_INDEX ( fasta ) + BWA_MEM ( UMITOOLS_EXTRACT.out.reads, BWA_INDEX.out.index, true ) + SAMTOOLS_INDEX (BWA_MEM.out.bam) + 
UMITOOLS_DEDUP(BWA_MEM.out.bam.join(SAMTOOLS_INDEX.out.bai, by: [0])) +} diff --git a/tests/modules/umitools/dedup/nextflow.config b/tests/modules/umitools/dedup/nextflow.config new file mode 100644 index 00000000..4a7533ef --- /dev/null +++ b/tests/modules/umitools/dedup/nextflow.config @@ -0,0 +1,17 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: UMITOOLS_EXTRACT { + ext.args = '--bc-pattern="NNNN"' + } + + withName: UMITOOLS_DEDUP { + ext.args = '' + ext.prefix = 'dedup' + } + + withName: BWA_MEM { + ext.args2 = '' + } +} diff --git a/tests/modules/umitools/dedup/test.yml b/tests/modules/umitools/dedup/test.yml new file mode 100644 index 00000000..2ba9073b --- /dev/null +++ b/tests/modules/umitools/dedup/test.yml @@ -0,0 +1,54 @@ +- name: umitools dedup test_umitools_dedup_no_umi + command: nextflow run tests/modules/umitools/dedup -entry test_umitools_dedup_no_umi -c tests/config/nextflow.config + tags: + - umitools/dedup + - umitools + files: + - path: output/umitools/dedup.bam + md5sum: 53b4edc399db81b87d2343e78af73cf0 + - path: output/umitools/dedup_edit_distance.tsv + md5sum: 65186b0964e2f8d970cc04d736d8b119 + - path: output/umitools/dedup_per_umi.tsv + md5sum: 8e6783a4a79437b095f095f2aefe7c01 + - path: output/umitools/dedup_per_umi_per_position.tsv + md5sum: 9386db4a104b8e4e32f3ca4a84efa4ac + - path: output/umitools/versions.yml + md5sum: 4aaaa33565bcd9a984255139933d6446 + +- name: umitools dedup test_umitools_dedup_single_end + command: nextflow run tests/modules/umitools/dedup -entry test_umitools_dedup_single_end -c tests/config/nextflow.config + tags: + - umitools + - umitools/dedup + files: + - path: output/bwa/test.bam + md5sum: ea41a3cdca1856b22845e1067fd31f37 + - path: output/bwa/versions.yml + md5sum: ce4d987f2c53f4c01b31d210c357b24a + - path: output/samtools/test.bam.bai + md5sum: 095af0ad3921212597ffd7c342ecd5a0 + - path: output/samtools/versions.yml + md5sum: 69b7cde627c9b4e8403dfc125db71cc7 + - path: output/umitools/dedup.bam + md5sum: d95df177063432748ff33f473910cb1e + - path: output/umitools/versions.yml + md5sum: 730e768dd199d2f5bfb6fd0850446344 + +- name: umitools dedup test_umitools_dedup_paired_end + command: nextflow run tests/modules/umitools/dedup -entry test_umitools_dedup_paired_end -c tests/config/nextflow.config + tags: + - umitools + - umitools/dedup + files: + - path: output/bwa/test.bam + md5sum: 1ad786cae0ff2254c655e3a206929617 + - path: output/bwa/versions.yml + md5sum: b524c5ddf61c20f4a0a93ae8fc78b851 + - path: output/samtools/test.bam.bai + md5sum: 7496f4056a8e86327ca93e350f282fc2 + - path: output/samtools/versions.yml + md5sum: 72fc2ab934fd4bca0f7f14a705530d34 + - path: output/umitools/dedup.bam + md5sum: e8d1eae2aacef76254948c5568e94555 + - path: output/umitools/versions.yml + md5sum: fd39e05042d354b3d8de49b617d3183d diff --git a/tests/modules/umitools/extract/main.nf b/tests/modules/umitools/extract/main.nf new file mode 100644 index 00000000..3baac16a --- /dev/null +++ b/tests/modules/umitools/extract/main.nf @@ -0,0 +1,29 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UMITOOLS_EXTRACT } from '../../../../modules/umitools/extract/main.nf' + +// +// Test with single-end data +// +workflow test_umitools_extract_single_end { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + UMITOOLS_EXTRACT ( input ) +} + +// +// Test with paired-end data 
+// +workflow test_umitools_extract_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + UMITOOLS_EXTRACT ( input ) +} + diff --git a/tests/modules/umitools/extract/nextflow.config b/tests/modules/umitools/extract/nextflow.config new file mode 100644 index 00000000..628f5fcd --- /dev/null +++ b/tests/modules/umitools/extract/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: UMITOOLS_EXTRACT { + ext.args = '--bc-pattern="NNNN"' + } + +} diff --git a/tests/modules/umitools/extract/test.yml b/tests/modules/umitools/extract/test.yml new file mode 100644 index 00000000..336cd122 --- /dev/null +++ b/tests/modules/umitools/extract/test.yml @@ -0,0 +1,27 @@ +- name: umitools extract test_umitools_extract_single_end + command: nextflow run tests/modules/umitools/extract -entry test_umitools_extract_single_end -c tests/config/nextflow.config + tags: + - umitools/extract + - umitools + files: + - path: output/umitools/test.umi_extract.fastq.gz + should_exist: true + - path: output/umitools/test.umi_extract.log + contains: ["job finished in"] + - path: output/umitools/versions.yml + md5sum: 397e6972343f9d7b8eae387fc18c12c7 + +- name: umitools extract test_umitools_extract_paired_end + command: nextflow run tests/modules/umitools/extract -entry test_umitools_extract_paired_end -c tests/config/nextflow.config + tags: + - umitools/extract + - umitools + files: + - path: output/umitools/test.umi_extract.log + contains: ["job finished in"] + - path: output/umitools/test.umi_extract_1.fastq.gz + should_exist: true + - path: output/umitools/test.umi_extract_2.fastq.gz + should_exist: true + - path: output/umitools/versions.yml + md5sum: 0aec6f919d62b7b79f6d0c5d79411464 From 7111e571cc5b6069de4673cd6165af680f17b4d7 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 25 Mar 2022 12:11:40 -0500 Subject: [PATCH 068/283] Split preseq lcextrap and ccurve (#1440) * fix(preseq): Update lcextrap file name * chore(preseq): Bump lcextrap image * feat: Initialize preseq c_curve * docs(preseq): Update documentation --- modules/preseq/ccurve/main.nf | 40 +++++++++++++++++ modules/preseq/ccurve/meta.yml | 48 +++++++++++++++++++++ modules/preseq/lcextrap/main.nf | 12 +++--- modules/preseq/lcextrap/meta.yml | 9 ++-- tests/config/pytest_modules.yml | 4 ++ tests/modules/preseq/ccurve/main.nf | 25 +++++++++++ tests/modules/preseq/ccurve/nextflow.config | 5 +++ tests/modules/preseq/ccurve/test.yml | 19 ++++++++ tests/modules/preseq/lcextrap/test.yml | 4 +- 9 files changed, 154 insertions(+), 12 deletions(-) create mode 100644 modules/preseq/ccurve/main.nf create mode 100644 modules/preseq/ccurve/meta.yml create mode 100644 tests/modules/preseq/ccurve/main.nf create mode 100644 tests/modules/preseq/ccurve/nextflow.config create mode 100644 tests/modules/preseq/ccurve/test.yml diff --git a/modules/preseq/ccurve/main.nf b/modules/preseq/ccurve/main.nf new file mode 100644 index 00000000..febf83cc --- /dev/null +++ b/modules/preseq/ccurve/main.nf @@ -0,0 +1,40 @@ +process PRESEQ_CCURVE { + tag "$meta.id" + label 'process_medium' + label 'error_ignore' + + conda (params.enable_conda ? 
"bioconda::preseq=3.1.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h445547b_2': + 'quay.io/biocontainers/preseq:3.1.2--h445547b_2' }" + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("*.c_curve.txt"), emit: c_curve + tuple val(meta), path("*.log") , emit: log + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def paired_end = meta.single_end ? '' : '-pe' + """ + preseq \\ + c_curve \\ + $args \\ + $paired_end \\ + -output ${prefix}.c_curve.txt \\ + $bam + cp .command.err ${prefix}.command.log + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + preseq: \$(echo \$(preseq 2>&1) | sed 's/^.*Version: //; s/Usage:.*\$//') + END_VERSIONS + """ +} diff --git a/modules/preseq/ccurve/meta.yml b/modules/preseq/ccurve/meta.yml new file mode 100644 index 00000000..86ed6296 --- /dev/null +++ b/modules/preseq/ccurve/meta.yml @@ -0,0 +1,48 @@ +name: preseq_ccurve +description: Software for predicting library complexity and genome coverage in high-throughput sequencing +keywords: + - preseq + - library + - complexity +tools: + - preseq: + description: Software for predicting library complexity and genome coverage in high-throughput sequencing + homepage: http://smithlabresearch.org/software/preseq/ + documentation: http://smithlabresearch.org/wp-content/uploads/manual.pdf + tool_dev_url: https://github.com/smithlabcode/preseq + doi: "" + licence: ["GPL"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - ccurve: + type: file + description: File containing output of Preseq c curve + pattern: "*.{c_curve.txt}" + - log: + type: file + description: Log file containing stderr produced by Preseq + pattern: "*.{log}" + +authors: + - "@drpatelh" + - "@Emiller88" diff --git a/modules/preseq/lcextrap/main.nf b/modules/preseq/lcextrap/main.nf index d6dd19e2..97261557 100644 --- a/modules/preseq/lcextrap/main.nf +++ b/modules/preseq/lcextrap/main.nf @@ -5,16 +5,16 @@ process PRESEQ_LCEXTRAP { conda (params.enable_conda ? "bioconda::preseq=3.1.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h06ef8b0_1' : - 'quay.io/biocontainers/preseq:3.1.2--h06ef8b0_1' }" + 'https://depot.galaxyproject.org/singularity/preseq:3.1.2--h445547b_2': + 'quay.io/biocontainers/preseq:3.1.2--h445547b_2' }" input: tuple val(meta), path(bam) output: - tuple val(meta), path("*.ccurve.txt"), emit: ccurve - tuple val(meta), path("*.log") , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path("*.lc_extrap.txt"), emit: lc_extrap + tuple val(meta), path("*.log") , emit: log + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -28,7 +28,7 @@ process PRESEQ_LCEXTRAP { lc_extrap \\ $args \\ $paired_end \\ - -output ${prefix}.ccurve.txt \\ + -output ${prefix}.lc_extrap.txt \\ $bam cp .command.err ${prefix}.command.log diff --git a/modules/preseq/lcextrap/meta.yml b/modules/preseq/lcextrap/meta.yml index 0e33df25..f1be05a2 100755 --- a/modules/preseq/lcextrap/meta.yml +++ b/modules/preseq/lcextrap/meta.yml @@ -8,8 +8,8 @@ tools: - preseq: description: Software for predicting library complexity and genome coverage in high-throughput sequencing homepage: http://smithlabresearch.org/software/preseq/ - documentation: None - tool_dev_url: None + documentation: http://smithlabresearch.org/wp-content/uploads/manual.pdf + tool_dev_url: https://github.com/smithlabcode/preseq doi: "" licence: ["GPL"] @@ -34,10 +34,10 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - ccurve: + - lc_extrap: type: file description: File containing output of Preseq lcextrap - pattern: "*.{ccurve.txt}" + pattern: "*.{lc_extrap.txt}" - log: type: file description: Log file containing stderr produced by Preseq @@ -45,3 +45,4 @@ output: authors: - "@drpatelh" + - "@Emiller88" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 78c3fe56..183708dc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1367,6 +1367,10 @@ porechop: - modules/porechop/** - tests/modules/porechop/** +preseq/ccurve: + - modules/preseq/ccurve/** + - tests/modules/preseq/ccurve/** + preseq/lcextrap: - modules/preseq/lcextrap/** - tests/modules/preseq/lcextrap/** diff --git a/tests/modules/preseq/ccurve/main.nf b/tests/modules/preseq/ccurve/main.nf new file mode 100644 index 00000000..59149563 --- /dev/null +++ b/tests/modules/preseq/ccurve/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PRESEQ_CCURVE } from '../../../../modules/preseq/ccurve/main.nf' + +workflow test_preseq_ccurve_single_end { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + PRESEQ_CCURVE ( input ) +} + +workflow test_preseq_ccurve_paired_end { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + PRESEQ_CCURVE ( input ) +} diff --git a/tests/modules/preseq/ccurve/nextflow.config b/tests/modules/preseq/ccurve/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/preseq/ccurve/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/preseq/ccurve/test.yml b/tests/modules/preseq/ccurve/test.yml new file mode 100644 index 00000000..c94dc978 --- /dev/null +++ 
b/tests/modules/preseq/ccurve/test.yml @@ -0,0 +1,19 @@ +- name: preseq ccurve single-end + command: nextflow run ./tests/modules/preseq/ccurve -entry test_preseq_ccurve_single_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/ccurve/nextflow.config + tags: + - preseq + - preseq/ccurve + files: + - path: output/preseq/test.c_curve.txt + md5sum: cf4743abdd355595d6ec1fb3f38e66e5 + - path: output/preseq/test.command.log + +- name: preseq ccurve paired-end + command: nextflow run ./tests/modules/preseq/ccurve -entry test_preseq_ccurve_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/preseq/ccurve/nextflow.config + tags: + - preseq + - preseq/ccurve + files: + - path: output/preseq/test.c_curve.txt + md5sum: cf4743abdd355595d6ec1fb3f38e66e5 + - path: output/preseq/test.command.log diff --git a/tests/modules/preseq/lcextrap/test.yml b/tests/modules/preseq/lcextrap/test.yml index ecd1d046..f5b5aea4 100644 --- a/tests/modules/preseq/lcextrap/test.yml +++ b/tests/modules/preseq/lcextrap/test.yml @@ -4,7 +4,7 @@ - preseq - preseq/lcextrap files: - - path: output/preseq/test.ccurve.txt + - path: output/preseq/test.lc_extrap.txt md5sum: 1fa5cdd601079329618f61660bee00de - path: output/preseq/test.command.log @@ -14,6 +14,6 @@ - preseq - preseq/lcextrap files: - - path: output/preseq/test.ccurve.txt + - path: output/preseq/test.lc_extrap.txt md5sum: 10e5ea860e87fb6f5dc10f4f20c62040 - path: output/preseq/test.command.log From 27936729f21245558c0425b3284c8bdfd4b805aa Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 25 Mar 2022 12:13:30 -0500 Subject: [PATCH 069/283] Add samtools to homer/maketagdirectory to use bam files (#1437) * fix(homer): Update dependancies for maketagdirectory Closes #1363 * test(homer): Add tests for bam usage * style(homer): Rename meta test * fix(homer): Update args order and add missing \\ * docs(homer): Add DESeq2 and edgeR Co-authored-by: FriederikeHanssen * test(homer): Update md5sum * test(homer): bam => sam * fix(homer): Use nextflow conda recipe instead of biocontainers with commas Co-authored-by: FriederikeHanssen --- modules/homer/maketagdirectory/main.nf | 13 ++++---- modules/homer/maketagdirectory/meta.yml | 33 +++++++++++++++++-- tests/modules/homer/maketagdirectory/main.nf | 23 +++++++++---- .../homer/maketagdirectory/nextflow.config | 6 +++- tests/modules/homer/maketagdirectory/test.yml | 29 ++++++++++++---- 5 files changed, 81 insertions(+), 23 deletions(-) diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 262998d5..0ab855da 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -4,13 +4,13 @@ process HOMER_MAKETAGDIRECTORY { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::homer=4.11=pl526hc9558a2_3" : null) + conda (params.enable_conda ? "bioconda::homer=4.11 bioconda::samtools=1.11 conda-forge::r-base=4.0.2 bioconda::bioconductor-deseq2=1.30.0 bioconda::bioconductor-edger=3.32.0 anaconda::perl=5.26.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/homer:4.11--pl526hc9558a2_3' : - 'quay.io/biocontainers/homer:4.11--pl526hc9558a2_3' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-29293b111ffe5b4c1d1e14c711264aaed6b97b4a:594338b771cacf1623bd27772b5e12825f8835f2-0' : + 'quay.io/biocontainers/mulled-v2-29293b111ffe5b4c1d1e14c711264aaed6b97b4a:594338b771cacf1623bd27772b5e12825f8835f2-0' }" input: - tuple val(meta), path(bed) + tuple val(meta), path(bam) path fasta output: @@ -26,13 +26,14 @@ process HOMER_MAKETAGDIRECTORY { """ makeTagDirectory \\ tag_dir \\ + -genome $fasta \\ $args \\ - $bed \\ - -genome $fasta + $bam cat <<-END_VERSIONS > versions.yml "${task.process}": homer: $VERSION + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ } diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml index 2472e0f0..31d59ee7 100644 --- a/modules/homer/maketagdirectory/meta.yml +++ b/modules/homer/maketagdirectory/meta.yml @@ -3,6 +3,8 @@ description: Create a tag directory with the HOMER suite keywords: - peaks - bed + - bam + - sam tools: - homer: description: | @@ -10,16 +12,41 @@ tools: documentation: http://homer.ucsd.edu/homer/ doi: 10.1016/j.molcel.2010.05.004. licence: ["GPL-3.0-or-later"] + - samtools: + description: | + SAMtools is a set of utilities for interacting with and post-processing + short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. + These files are generated as output by short read aligners like BWA. + homepage: http://www.htslib.org/ + documentation: hhttp://www.htslib.org/doc/samtools.html + doi: 10.1093/bioinformatics/btp352 + licence: ["MIT"] + - DESeq2: + description: | + Differential gene expression analysis based on the negative binomial distribution + homepage: "https://bioconductor.org/packages/DESeq2" + documentation: "https://bioconductor.org/packages/DESeq2" + tool_dev_url: "https://github.com/mikelove/DESeq2" + doi: 10.18129/B9.bioc.DESeq2 + licence: ["LGPL-3.0-or-later"] + - edgeR: + description: | + Empirical Analysis of Digital Gene Expression Data in R + homepage: "https://bioinf.wehi.edu.au/edgeR" + documentation: "https://bioconductor.org/packages/edgeR" + tool_dev_url: " https://git.bioconductor.org/packages/edgeR" + doi: 10.18129/B9.bioc.edgeR + licence: ["GPL >=2"] input: - meta: type: map description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - bed: + - bam: type: file - description: The peak files in bed format - pattern: "*.bed" + description: BAM/BED/SAM file + pattern: "*.{bam,bed,sam}" - fasta: type: file description: Fasta file of reference genome diff --git a/tests/modules/homer/maketagdirectory/main.nf b/tests/modules/homer/maketagdirectory/main.nf index 766aff0d..7de693e6 100644 --- a/tests/modules/homer/maketagdirectory/main.nf +++ b/tests/modules/homer/maketagdirectory/main.nf @@ -2,19 +2,22 @@ nextflow.enable.dsl = 2 -include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main.nf' +include { + HOMER_MAKETAGDIRECTORY as HOMER_MAKETAGDIRECTORY_BED + HOMER_MAKETAGDIRECTORY as HOMER_MAKETAGDIRECTORY_BAM +} from '../../../../modules/homer/maketagdirectory/main.nf' -workflow test_homer_maketagdirectory { +workflow test_homer_maketagdirectory_bed { input = [[id:'test'], [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['test2_bed'], checkIfExists: true)]] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - HOMER_MAKETAGDIRECTORY (input, fasta) + HOMER_MAKETAGDIRECTORY_BED (input, fasta) } -workflow test_homer_meta_maketagdirectory { +workflow test_homer_maketagdirectory_meta { input = [[[ id:'test1'], [file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)]], @@ -25,8 +28,14 @@ workflow test_homer_meta_maketagdirectory { meta_input = [[id: 'meta_test']] + [ input.collect{it[1]}.flatten() ] - HOMER_MAKETAGDIRECTORY (meta_input, fasta) + HOMER_MAKETAGDIRECTORY_BED (meta_input, fasta) } -// TODO Make a failing bam test -// TODO Make a pass bam test that feeds the bam through samtools first +workflow test_homer_maketagdirectory_bam { + input = [[id:'test'], + [file(params.test_data['sarscov2']['illumina']['test_single_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)]] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + + HOMER_MAKETAGDIRECTORY_BAM (input, fasta) +} diff --git a/tests/modules/homer/maketagdirectory/nextflow.config b/tests/modules/homer/maketagdirectory/nextflow.config index 81587d69..9d7a3f9f 100644 --- a/tests/modules/homer/maketagdirectory/nextflow.config +++ b/tests/modules/homer/maketagdirectory/nextflow.config @@ -2,8 +2,12 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - withName: HOMER_MAKETAGDIRECTORY { + withName: HOMER_MAKETAGDIRECTORY_BED { ext.args = '-format bed' } + withName: HOMER_MAKETAGDIRECTORY_BAM { + ext.args = '-format sam' + } + } diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml index 746c6ef6..28e74c32 100644 --- a/tests/modules/homer/maketagdirectory/test.yml +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -1,5 +1,5 @@ -- name: homer maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config +- name: homer maketagdirectory bed + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory_bed -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory @@ -11,12 +11,12 @@ - path: 
output/homer/tag_dir/tagCountDistribution.txt md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 - path: output/homer/tag_dir/tagInfo.txt - md5sum: 816baa642c946f8284eaa465638e9abb + md5sum: ff56f30411b221b847aa4e6e9a6098a1 - path: output/homer/tag_dir/tagLengthDistribution.txt md5sum: e5aa2b9843ca9c04ace297280aed6af4 -- name: homer meta maketagdirectory - command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_meta_maketagdirectory -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config +- name: homer maketagdirectory meta + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory_meta -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config tags: - homer - homer/maketagdirectory @@ -28,6 +28,23 @@ - path: output/homer/tag_dir/tagCountDistribution.txt md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 - path: output/homer/tag_dir/tagInfo.txt - md5sum: 816baa642c946f8284eaa465638e9abb + md5sum: ff56f30411b221b847aa4e6e9a6098a1 - path: output/homer/tag_dir/tagLengthDistribution.txt md5sum: e5aa2b9843ca9c04ace297280aed6af4 + +- name: homer maketagdirectory bam + command: nextflow run ./tests/modules/homer/maketagdirectory -entry test_homer_maketagdirectory_bam -c ./tests/config/nextflow.config -c ./tests/modules/homer/maketagdirectory/nextflow.config + tags: + - homer + - homer/maketagdirectory + files: + - path: output/homer/tag_dir/MT192765.1.tags.tsv + md5sum: 365808c4751ef6dd7085ac52037a22bc + - path: output/homer/tag_dir/tagAutocorrelation.txt + md5sum: 8b396f2aef1cdd3af4fab57b142d3250 + - path: output/homer/tag_dir/tagCountDistribution.txt + md5sum: afc6d007096c3872bbe84c9dc8edb832 + - path: output/homer/tag_dir/tagInfo.txt + md5sum: fbaf46eeb8a0723fa8b5eabd93f9d821 + - path: output/homer/tag_dir/tagLengthDistribution.txt + md5sum: 44f231adb2a705ae81950808c55cf248 From 4efa8da5c5bd8b68d667ddade7ed398e16c145f6 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 25 Mar 2022 18:22:17 +0100 Subject: [PATCH 070/283] controlfreec significance (#1451) * controlfreec significance * move freec files to own subfolder * Fix meta.yml naming * Fix meta.yml naming * Fix linting * Forgot to refactor * forgot more refactoring * Too much refactoring on output paths * Too little refactoring here * update checksum --- .../controlfreec/assesssignificance/main.nf | 30 +++++++++++ .../controlfreec/assesssignificance/meta.yml | 50 +++++++++++++++++++ modules/controlfreec/{ => freec}/main.nf | 2 +- modules/controlfreec/{ => freec}/meta.yml | 4 +- tests/config/pytest_modules.yml | 10 ++-- .../controlfreec/assesssignificance/main.nf | 42 ++++++++++++++++ .../{ => assesssignificance}/nextflow.config | 2 +- .../controlfreec/assesssignificance/test.yml | 10 ++++ .../modules/controlfreec/{ => freec}/main.nf | 27 +++++----- .../controlfreec/freec/nextflow.config | 26 ++++++++++ .../modules/controlfreec/{ => freec}/test.yml | 7 +-- 11 files changed, 187 insertions(+), 23 deletions(-) create mode 100644 modules/controlfreec/assesssignificance/main.nf create mode 100644 modules/controlfreec/assesssignificance/meta.yml rename modules/controlfreec/{ => freec}/main.nf (99%) rename modules/controlfreec/{ => freec}/meta.yml (99%) create mode 100644 tests/modules/controlfreec/assesssignificance/main.nf rename tests/modules/controlfreec/{ => assesssignificance}/nextflow.config (96%) create mode 100644 tests/modules/controlfreec/assesssignificance/test.yml rename tests/modules/controlfreec/{ => 
freec}/main.nf (66%) create mode 100644 tests/modules/controlfreec/freec/nextflow.config rename tests/modules/controlfreec/{ => freec}/test.yml (78%) diff --git a/modules/controlfreec/assesssignificance/main.nf b/modules/controlfreec/assesssignificance/main.nf new file mode 100644 index 00000000..dc9c6e86 --- /dev/null +++ b/modules/controlfreec/assesssignificance/main.nf @@ -0,0 +1,30 @@ +process CONTROLFREEC_ASSESSSIGNIFICANCE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::control-freec=11.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1': + 'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }" + + input: + tuple val(meta), path(cnvs), path(ratio) + + output: + tuple val(meta), path("*.p.value.txt"), emit: p_value_txt + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + """ + cat /usr/local/bin/assess_significance.R | R --slave --args ${cnvs} ${ratio} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ +} diff --git a/modules/controlfreec/assesssignificance/meta.yml b/modules/controlfreec/assesssignificance/meta.yml new file mode 100644 index 00000000..0451cca3 --- /dev/null +++ b/modules/controlfreec/assesssignificance/meta.yml @@ -0,0 +1,50 @@ +name: controlfreec_assesssignificance +description: Add both Wilcoxon test and Kolmogorov-Smirnov test p-values to each CNV output of FREEC +keywords: + - cna + - cnv + - somatic + - single + - tumor-only +tools: + - controlfreec/assesssignificance: + description: Copy number and genotype annotation from whole genome and whole exome sequencing data. + homepage: http://boevalab.inf.ethz.ch/FREEC + documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html + tool_dev_url: https://github.com/BoevaLab/FREEC/ + doi: "10.1093/bioinformatics/btq635" + licence: ["GPL >=2"] + +input: + # Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - cnvs: + type: file + description: _CNVs file generated by FREEC + pattern: "*._CNVs" + - ratio: + type: file + description: ratio file generated by FREEC + pattern: "*.ratio.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - p_value_txt: + type: file + description: CNV file containing p_values for each call + pattern: "*.p.value.txt" + +authors: + - "@FriederikeHanssen" diff --git a/modules/controlfreec/main.nf b/modules/controlfreec/freec/main.nf similarity index 99% rename from modules/controlfreec/main.nf rename to modules/controlfreec/freec/main.nf index 21084f64..ba48ea1e 100644 --- a/modules/controlfreec/main.nf +++ b/modules/controlfreec/freec/main.nf @@ -1,4 +1,4 @@ -process CONTROLFREEC { +process CONTROLFREEC_FREEC { tag "$meta.id" label 'process_low' diff --git a/modules/controlfreec/meta.yml b/modules/controlfreec/freec/meta.yml similarity index 99% rename from modules/controlfreec/meta.yml rename to modules/controlfreec/freec/meta.yml index b2a6772b..a9a7375e 100644 --- a/modules/controlfreec/meta.yml +++ b/modules/controlfreec/freec/meta.yml @@ -1,4 +1,4 @@ -name: controlfreec +name: controlfreec_freec description: Copy number and genotype annotation from whole genome and whole exome sequencing data keywords: - cna @@ -7,7 +7,7 @@ keywords: - single - tumor-only tools: - - controlfreec: + - controlfreec/freec: description: Copy number and genotype annotation from whole genome and whole exome sequencing data. homepage: http://boevalab.inf.ethz.ch/FREEC documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 183708dc..20d9ad1d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -419,9 +419,13 @@ cnvkit/batch: - modules/cnvkit/batch/** - tests/modules/cnvkit/batch/** -controlfreec: - - modules/controlfreec/** - - tests/modules/controlfreec/** +controlfreec/assesssignificance: + - modules/controlfreec/assesssignificance/** + - tests/modules/controlfreec/assesssignificance/** + +controlfreec/freec: + - modules/controlfreec/freec/** + - tests/modules/controlfreec/freec/** cooler/cload: - modules/cooler/cload/** diff --git a/tests/modules/controlfreec/assesssignificance/main.nf b/tests/modules/controlfreec/assesssignificance/main.nf new file mode 100644 index 00000000..f8d8aa1d --- /dev/null +++ b/tests/modules/controlfreec/assesssignificance/main.nf @@ -0,0 +1,42 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CONTROLFREEC_ASSESSSIGNIFICANCE } from '../../../../modules/controlfreec/assesssignificance/main.nf' +include { CONTROLFREEC_FREEC } from '../../../../modules/controlfreec/freec/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' + +workflow test_controlfreec_assesssignificance { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = 
file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + sig_in = CONTROLFREEC_FREEC.out.CNV.join(CONTROLFREEC_FREEC.out.ratio) + CONTROLFREEC_ASSESSSIGNIFICANCE ( sig_in ) +} diff --git a/tests/modules/controlfreec/nextflow.config b/tests/modules/controlfreec/assesssignificance/nextflow.config similarity index 96% rename from tests/modules/controlfreec/nextflow.config rename to tests/modules/controlfreec/assesssignificance/nextflow.config index 5c4250be..65273dea 100644 --- a/tests/modules/controlfreec/nextflow.config +++ b/tests/modules/controlfreec/assesssignificance/nextflow.config @@ -2,7 +2,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - withName:CONTROLFREEC{ + withName:CONTROLFREEC_FREEC{ ext.args = { [ "sample":[ inputformat: 'pileup', diff --git a/tests/modules/controlfreec/assesssignificance/test.yml b/tests/modules/controlfreec/assesssignificance/test.yml new file mode 100644 index 00000000..58f83f43 --- /dev/null +++ b/tests/modules/controlfreec/assesssignificance/test.yml @@ -0,0 +1,10 @@ +- name: controlfreec assesssignificance test_controlfreec_assesssignificance + command: nextflow run tests/modules/controlfreec/assesssignificance -entry test_controlfreec_assesssignificance -c tests/config/nextflow.config + tags: + - controlfreec/assesssignificance + - controlfreec + files: + - path: output/controlfreec/test2.mpileup.gz_CNVs.p.value.txt + md5sum: 44e23b916535fbc1a3f47b57fad292df + - path: output/controlfreec/versions.yml + md5sum: 0aa42fed10d61e4570fe1e0e83ffe932 diff --git a/tests/modules/controlfreec/main.nf b/tests/modules/controlfreec/freec/main.nf similarity index 66% rename from tests/modules/controlfreec/main.nf rename to tests/modules/controlfreec/freec/main.nf index 247f9887..d14c8f65 100644 --- a/tests/modules/controlfreec/main.nf +++ b/tests/modules/controlfreec/freec/main.nf @@ -2,9 +2,10 @@ nextflow.enable.dsl = 2 -include { CONTROLFREEC } from '../../../modules/controlfreec/main.nf' -include { UNTAR } from '../../../modules/untar/main.nf' -workflow test_controlfreec { +include { CONTROLFREEC_FREEC } from '../../../../modules/controlfreec/freec/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' + +workflow test_controlfreec_freec { input = [ [ id:'test', single_end:false, sex:'XX' ], // meta map @@ -23,15 +24,15 @@ workflow test_controlfreec { target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) UNTAR(chrfiles) - CONTROLFREEC ( input, - fasta, - fai, - [], - dbsnp, - dbsnp_tbi, - UNTAR.out.untar.map{ it[1] }, - [], - target_bed, - [] + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] ) } diff --git a/tests/modules/controlfreec/freec/nextflow.config b/tests/modules/controlfreec/freec/nextflow.config new file mode 100644 index 00000000..65273dea --- /dev/null +++ b/tests/modules/controlfreec/freec/nextflow.config @@ -0,0 +1,26 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName:CONTROLFREEC_FREEC{ + ext.args = { [ + "sample":[ + inputformat: 'pileup', + mateorientation: 'FR' + ], + "general" :[ + bedgraphoutput: "TRUE", + noisydata: "TRUE", + minexpectedgc: 
"0", + readcountthreshold: "1", + sex: meta.sex, + window: "10", + ], + "control":[ + inputformat: "pileup", + mateorientation: "FR" + ] + ] + } + } +} diff --git a/tests/modules/controlfreec/test.yml b/tests/modules/controlfreec/freec/test.yml similarity index 78% rename from tests/modules/controlfreec/test.yml rename to tests/modules/controlfreec/freec/test.yml index 14c30205..d50fc063 100644 --- a/tests/modules/controlfreec/test.yml +++ b/tests/modules/controlfreec/freec/test.yml @@ -1,7 +1,8 @@ -- name: controlfreec test_controlfreec - command: nextflow run tests/modules/controlfreec -entry test_controlfreec -c tests/config/nextflow.config +- name: controlfreec test_controlfreec_freec + command: nextflow run tests/modules/controlfreec/freec -entry test_controlfreec_freec -c tests/config/nextflow.config tags: - controlfreec + - controlfreec/freec files: - path: output/controlfreec/config.txt - path: output/controlfreec/test.mpileup.gz_control.cpn @@ -19,4 +20,4 @@ - path: output/controlfreec/test2.mpileup.gz_sample.cpn md5sum: c80dad58a77b1d7ba6d273999f4b4b4b - path: output/controlfreec/versions.yml - md5sum: ff93f6466d4686aab708425782c6c848 + md5sum: 3ab250a2ab3be22628124c7c65324651 From 8a64e73af29a8096e1996e0496df4ae8c449c40b Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 25 Mar 2022 20:31:52 +0100 Subject: [PATCH 071/283] add freec2bed script (#1453) --- modules/controlfreec/freec2bed/main.nf | 31 +++++++++++++ modules/controlfreec/freec2bed/meta.yml | 45 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/controlfreec/freec2bed/main.nf | 41 +++++++++++++++++ .../controlfreec/freec2bed/nextflow.config | 27 +++++++++++ tests/modules/controlfreec/freec2bed/test.yml | 8 ++++ 6 files changed, 156 insertions(+) create mode 100644 modules/controlfreec/freec2bed/main.nf create mode 100644 modules/controlfreec/freec2bed/meta.yml create mode 100644 tests/modules/controlfreec/freec2bed/main.nf create mode 100644 tests/modules/controlfreec/freec2bed/nextflow.config create mode 100644 tests/modules/controlfreec/freec2bed/test.yml diff --git a/modules/controlfreec/freec2bed/main.nf b/modules/controlfreec/freec2bed/main.nf new file mode 100644 index 00000000..880e4716 --- /dev/null +++ b/modules/controlfreec/freec2bed/main.nf @@ -0,0 +1,31 @@ +process CONTROLFREEC_FREEC2BED { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::control-freec=11.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1': + 'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }" + + input: + tuple val(meta), path(ratio) + + output: + tuple val(meta), path("*.bed"), emit: bed + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + freec2bed.pl -f ${ratio} ${args} > ${prefix}.bed + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ +} diff --git a/modules/controlfreec/freec2bed/meta.yml b/modules/controlfreec/freec2bed/meta.yml new file mode 100644 index 00000000..47fff8ab --- /dev/null +++ b/modules/controlfreec/freec2bed/meta.yml @@ -0,0 +1,45 @@ +name: controlfreec_freec2bed +description: Plot Freec output +keywords: + - cna + - cnv + - somatic + - single + - tumor-only +tools: + - controlfreec: + description: Copy number and genotype annotation from whole genome and whole exome sequencing data. + homepage: http://boevalab.inf.ethz.ch/FREEC + documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html + tool_dev_url: https://github.com/BoevaLab/FREEC/ + doi: "10.1093/bioinformatics/btq635" + licence: ["GPL >=2"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ratio: + type: file + description: ratio file generated by FREEC + pattern: "*.ratio.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bed: + type: file + description: Bed file + pattern: "*.bed" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 20d9ad1d..0c77497d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -427,6 +427,10 @@ controlfreec/freec: - modules/controlfreec/freec/** - tests/modules/controlfreec/freec/** +controlfreec/freec2bed: + - modules/controlfreec/freec2bed/** + - tests/modules/controlfreec/freec2bed/** + cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** diff --git a/tests/modules/controlfreec/freec2bed/main.nf b/tests/modules/controlfreec/freec2bed/main.nf new file mode 100644 index 00000000..df121832 --- /dev/null +++ b/tests/modules/controlfreec/freec2bed/main.nf @@ -0,0 +1,41 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CONTROLFREEC_FREEC2BED } from '../../../../modules/controlfreec/freec2bed/main.nf' +include { CONTROLFREEC_FREEC } from '../../../../modules/controlfreec/freec/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' + +workflow test_controlfreec_freec2bed { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], 
checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + CONTROLFREEC_FREEC2BED ( CONTROLFREEC_FREEC.out.ratio ) +} diff --git a/tests/modules/controlfreec/freec2bed/nextflow.config b/tests/modules/controlfreec/freec2bed/nextflow.config new file mode 100644 index 00000000..8c2f77eb --- /dev/null +++ b/tests/modules/controlfreec/freec2bed/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName:CONTROLFREEC_FREEC{ + ext.args = { [ + "sample":[ + inputformat: 'pileup', + mateorientation: 'FR' + ], + "general" :[ + bedgraphoutput: "TRUE", + noisydata: "TRUE", + minexpectedgc: "0", + readcountthreshold: "1", + sex: meta.sex, + window: "10", + ], + "control":[ + inputformat: "pileup", + mateorientation: "FR" + ] + ] + } + } + +} diff --git a/tests/modules/controlfreec/freec2bed/test.yml b/tests/modules/controlfreec/freec2bed/test.yml new file mode 100644 index 00000000..0198bac6 --- /dev/null +++ b/tests/modules/controlfreec/freec2bed/test.yml @@ -0,0 +1,8 @@ +- name: controlfreec freec2bed test_controlfreec_freec2bed + command: nextflow run tests/modules/controlfreec/freec2bed -entry test_controlfreec_freec2bed -c tests/config/nextflow.config + tags: + - controlfreec/freec2bed + - controlfreec + files: + - path: output/controlfreec/test.bed + md5sum: abe10b7ce94ba903503e697394c17297 From 28e5211b3513d80f198beb7090f57242165cc030 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 25 Mar 2022 20:49:08 +0100 Subject: [PATCH 072/283] add makegraph script (#1452) * add makegraph script * allow renaming of output files * allow renaming of output files --- .../controlfreec/assesssignificance/main.nf | 5 +- modules/controlfreec/makegraph/main.nf | 40 +++++++++++++ modules/controlfreec/makegraph/meta.yml | 58 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ .../controlfreec/assesssignificance/test.yml | 2 +- tests/modules/controlfreec/makegraph/main.nf | 42 ++++++++++++++ .../controlfreec/makegraph/nextflow.config | 30 ++++++++++ tests/modules/controlfreec/makegraph/test.yml | 12 ++++ 8 files changed, 191 insertions(+), 2 deletions(-) create mode 100644 modules/controlfreec/makegraph/main.nf create mode 100644 modules/controlfreec/makegraph/meta.yml create mode 100644 tests/modules/controlfreec/makegraph/main.nf create mode 100644 tests/modules/controlfreec/makegraph/nextflow.config create mode 100644 tests/modules/controlfreec/makegraph/test.yml diff --git a/modules/controlfreec/assesssignificance/main.nf b/modules/controlfreec/assesssignificance/main.nf index dc9c6e86..f85a3c7f 100644 --- a/modules/controlfreec/assesssignificance/main.nf +++ b/modules/controlfreec/assesssignificance/main.nf @@ -11,7 +11,7 @@ process CONTROLFREEC_ASSESSSIGNIFICANCE { tuple val(meta), path(cnvs), path(ratio) output: - tuple val(meta), path("*.p.value.txt"), emit: bam + tuple val(meta), path("*.p.value.txt"), emit: p_value_txt path "versions.yml" , emit: versions when: @@ -19,9 +19,12 @@ process CONTROLFREEC_ASSESSSIGNIFICANCE { 
script: def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" """ cat /usr/local/bin/assess_significance.R | R --slave --args ${cnvs} ${ratio} + mv *.p.value.txt ${prefix}.p.value.txt + cat <<-END_VERSIONS > versions.yml "${task.process}": controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) diff --git a/modules/controlfreec/makegraph/main.nf b/modules/controlfreec/makegraph/main.nf new file mode 100644 index 00000000..9a0c7281 --- /dev/null +++ b/modules/controlfreec/makegraph/main.nf @@ -0,0 +1,40 @@ +process CONTROLFREEC_MAKEGRAPH { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::control-freec=11.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1': + 'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }" + + input: + tuple val(meta), path(ratio), path(baf) + + output: + tuple val(meta), path("*_BAF.png") , emit: png_baf + tuple val(meta), path("*_ratio.log2.png"), emit: png_ratio_log2 + tuple val(meta), path("*_ratio.png") , emit: png_ratio + + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: "" + def prefix = task.ext.prefix ?: "${meta.id}" + def baf = baf ?: "" + """ + cat /usr/local/bin/makeGraph.R | R --slave --args ${args} ${ratio} ${baf} + + mv *_BAF.txt.png ${prefix}_BAF.png + mv *_ratio.txt.log2.png ${prefix}_ratio.log2.png + mv *_ratio.txt.png ${prefix}_ratio.png + + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ +} diff --git a/modules/controlfreec/makegraph/meta.yml b/modules/controlfreec/makegraph/meta.yml new file mode 100644 index 00000000..a207ec8c --- /dev/null +++ b/modules/controlfreec/makegraph/meta.yml @@ -0,0 +1,58 @@ +name: controlfreec_makegraph +description: Plot Freec output +keywords: + - cna + - cnv + - somatic + - single + - tumor-only +tools: + - controlfreec: + description: Copy number and genotype annotation from whole genome and whole exome sequencing data. + homepage: http://boevalab.inf.ethz.ch/FREEC + documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html + tool_dev_url: https://github.com/BoevaLab/FREEC/ + doi: "10.1093/bioinformatics/btq635" + licence: ["GPL >=2"] + +input: + # Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ratio: + type: file + description: ratio file generated by FREEC + pattern: "*.ratio.txt" + - baf: + type: file + description: .BAF file generated by FREEC + pattern: "*.BAF" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - png_baf: + type: file + description: Image of BAF plot + pattern: "*_BAF.png" + - png_ratio_log2: + type: file + description: Image of ratio log2 plot + pattern: "*_ratio.log2.png" + - png_ratio: + type: file + description: Image of ratio plot + pattern: "*_ratio.png" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 0c77497d..6e2dbb9a 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -431,6 +431,10 @@ controlfreec/freec2bed: - modules/controlfreec/freec2bed/** - tests/modules/controlfreec/freec2bed/** +controlfreec/makegraph: + - modules/controlfreec/makegraph/** + - tests/modules/controlfreec/makegraph/** + cooler/cload: - modules/cooler/cload/** - tests/modules/cooler/cload/** diff --git a/tests/modules/controlfreec/assesssignificance/test.yml b/tests/modules/controlfreec/assesssignificance/test.yml index 58f83f43..f8393330 100644 --- a/tests/modules/controlfreec/assesssignificance/test.yml +++ b/tests/modules/controlfreec/assesssignificance/test.yml @@ -4,7 +4,7 @@ - controlfreec/assesssignificance - controlfreec files: - - path: output/controlfreec/test2.mpileup.gz_CNVs.p.value.txt + - path: output/controlfreec/test.p.value.txt md5sum: 44e23b916535fbc1a3f47b57fad292df - path: output/controlfreec/versions.yml md5sum: 0aa42fed10d61e4570fe1e0e83ffe932 diff --git a/tests/modules/controlfreec/makegraph/main.nf b/tests/modules/controlfreec/makegraph/main.nf new file mode 100644 index 00000000..ffea3d99 --- /dev/null +++ b/tests/modules/controlfreec/makegraph/main.nf @@ -0,0 +1,42 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CONTROLFREEC_MAKEGRAPH } from '../../../../modules/controlfreec/makegraph/main.nf' +include { CONTROLFREEC_FREEC } from '../../../../modules/controlfreec/freec/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' + +workflow test_controlfreec_makegraph { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + makegraph_in = CONTROLFREEC_FREEC.out.ratio.join(CONTROLFREEC_FREEC.out.BAF) + CONTROLFREEC_MAKEGRAPH ( makegraph_in ) +} diff --git a/tests/modules/controlfreec/makegraph/nextflow.config b/tests/modules/controlfreec/makegraph/nextflow.config new file mode 100644 index 00000000..f88fae50 --- /dev/null +++ b/tests/modules/controlfreec/makegraph/nextflow.config @@ 
-0,0 +1,30 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName:CONTROLFREEC_FREEC{ + ext.args = { [ + "sample":[ + inputformat: 'pileup', + mateorientation: 'FR' + ], + "general" :[ + bedgraphoutput: "TRUE", + noisydata: "TRUE", + minexpectedgc: "0", + readcountthreshold: "1", + sex: meta.sex, + window: "10", + ], + "control":[ + inputformat: "pileup", + mateorientation: "FR" + ] + ] + } + } + + withName:CONTROLFREEC_MAKEGRAPH { + ext.args = "2" + } +} diff --git a/tests/modules/controlfreec/makegraph/test.yml b/tests/modules/controlfreec/makegraph/test.yml new file mode 100644 index 00000000..21e78766 --- /dev/null +++ b/tests/modules/controlfreec/makegraph/test.yml @@ -0,0 +1,12 @@ +- name: controlfreec makegraph test_controlfreec_makegraph + command: nextflow run tests/modules/controlfreec/makegraph -entry test_controlfreec_makegraph -c tests/config/nextflow.config + tags: + - controlfreec + - controlfreec/makegraph + files: + - path: output/controlfreec/test_BAF.png + md5sum: f9d977839e09c7e2472d970bd4aa834c + - path: output/controlfreec/test_ratio.log2.png + md5sum: b3c7916b1b4951a0cc3da20d8e9e0262 + - path: output/controlfreec/test_ratio.png + md5sum: 1435b29536b3b1555b4c423f8f4fb000 From 5acf301ddda04072cf7233a6c8f5fa5df867de99 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Sat, 26 Mar 2022 21:25:29 +0100 Subject: [PATCH 073/283] add freec2circos script (#1454) * add freec2circos script * remove todo statements --- modules/controlfreec/freec2circos/main.nf | 31 +++++++++++++ modules/controlfreec/freec2circos/meta.yml | 45 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ .../modules/controlfreec/freec2circos/main.nf | 41 +++++++++++++++++ .../controlfreec/freec2circos/nextflow.config | 27 +++++++++++ .../controlfreec/freec2circos/test.yml | 8 ++++ 6 files changed, 156 insertions(+) create mode 100644 modules/controlfreec/freec2circos/main.nf create mode 100644 modules/controlfreec/freec2circos/meta.yml create mode 100644 tests/modules/controlfreec/freec2circos/main.nf create mode 100644 tests/modules/controlfreec/freec2circos/nextflow.config create mode 100644 tests/modules/controlfreec/freec2circos/test.yml diff --git a/modules/controlfreec/freec2circos/main.nf b/modules/controlfreec/freec2circos/main.nf new file mode 100644 index 00000000..8879d4c0 --- /dev/null +++ b/modules/controlfreec/freec2circos/main.nf @@ -0,0 +1,31 @@ +process CONTROLFREEC_FREEC2CIRCOS { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::control-freec=11.6" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/control-freec:11.6--h1b792b2_1': + 'quay.io/biocontainers/control-freec:11.6--h1b792b2_1' }" + + input: + tuple val(meta), path(ratio) + + output: + tuple val(meta), path("*.circos.txt"), emit: circos + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + freec2circos.pl -f ${ratio} ${args} > ${prefix}.circos.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ +} diff --git a/modules/controlfreec/freec2circos/meta.yml b/modules/controlfreec/freec2circos/meta.yml new file mode 100644 index 00000000..ff845a82 --- /dev/null +++ b/modules/controlfreec/freec2circos/meta.yml @@ -0,0 +1,45 @@ +name: controlfreec_freec2circos +description: Format Freec output to circos input format +keywords: + - cna + - cnv + - somatic + - single + - tumor-only +tools: + - controlfreec: + description: Copy number and genotype annotation from whole genome and whole exome sequencing data. + homepage: http://boevalab.inf.ethz.ch/FREEC + documentation: http://boevalab.inf.ethz.ch/FREEC/tutorial.html + tool_dev_url: https://github.com/BoevaLab/FREEC/ + doi: "10.1093/bioinformatics/btq635" + licence: ["GPL >=2"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - ratio: + type: file + description: ratio file generated by FREEC + pattern: "*.ratio.txt" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - circos: + type: file + description: Txt file + pattern: "*.circos.txt" + +authors: + - "@FriederikeHanssen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6e2dbb9a..a6e2067f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -431,6 +431,10 @@ controlfreec/freec2bed: - modules/controlfreec/freec2bed/** - tests/modules/controlfreec/freec2bed/** +controlfreec/freec2circos: + - modules/controlfreec/freec2circos/** + - tests/modules/controlfreec/freec2circos/** + controlfreec/makegraph: - modules/controlfreec/makegraph/** - tests/modules/controlfreec/makegraph/** diff --git a/tests/modules/controlfreec/freec2circos/main.nf b/tests/modules/controlfreec/freec2circos/main.nf new file mode 100644 index 00000000..9b655f0e --- /dev/null +++ b/tests/modules/controlfreec/freec2circos/main.nf @@ -0,0 +1,41 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { CONTROLFREEC_FREEC2CIRCOS } from '../../../../modules/controlfreec/freec2circos/main.nf' +include { CONTROLFREEC_FREEC } from '../../../../modules/controlfreec/freec/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' + +workflow test_controlfreec_freec2circos { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = 
file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + CONTROLFREEC_FREEC2CIRCOS ( CONTROLFREEC_FREEC.out.ratio ) +} diff --git a/tests/modules/controlfreec/freec2circos/nextflow.config b/tests/modules/controlfreec/freec2circos/nextflow.config new file mode 100644 index 00000000..8c2f77eb --- /dev/null +++ b/tests/modules/controlfreec/freec2circos/nextflow.config @@ -0,0 +1,27 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName:CONTROLFREEC_FREEC{ + ext.args = { [ + "sample":[ + inputformat: 'pileup', + mateorientation: 'FR' + ], + "general" :[ + bedgraphoutput: "TRUE", + noisydata: "TRUE", + minexpectedgc: "0", + readcountthreshold: "1", + sex: meta.sex, + window: "10", + ], + "control":[ + inputformat: "pileup", + mateorientation: "FR" + ] + ] + } + } + +} diff --git a/tests/modules/controlfreec/freec2circos/test.yml b/tests/modules/controlfreec/freec2circos/test.yml new file mode 100644 index 00000000..5758a828 --- /dev/null +++ b/tests/modules/controlfreec/freec2circos/test.yml @@ -0,0 +1,8 @@ +- name: controlfreec freec2circos test_controlfreec_freec2circos + command: nextflow run tests/modules/controlfreec/freec2circos -entry test_controlfreec_freec2circos -c tests/config/nextflow.config + tags: + - controlfreec + - controlfreec/freec2circos + files: + - path: output/controlfreec/test.circos.txt + md5sum: 19cf35f2c36b46f717dc8342b8a5a645 From 5832fbc225b59577677bd8c38353a3de293998d1 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Sat, 26 Mar 2022 22:17:42 -0500 Subject: [PATCH 074/283] Fix homer/maketagdirectory output collision (#1456) * fix(homer): Update tagdir to have a prefix Otherwise they have a naming collision * test(homer): Update paths * style(homer): Align ,'s * chore(homer): Update md5sums --- modules/homer/maketagdirectory/main.nf | 7 ++-- modules/homer/maketagdirectory/meta.yml | 8 +++-- tests/modules/homer/findpeaks/test.yml | 2 +- tests/modules/homer/maketagdirectory/test.yml | 36 +++++++++---------- tests/modules/homer/pos2bed/test.yml | 4 +-- 5 files changed, 31 insertions(+), 26 deletions(-) diff --git a/modules/homer/maketagdirectory/main.nf b/modules/homer/maketagdirectory/main.nf index 0ab855da..35b6904c 100644 --- a/modules/homer/maketagdirectory/main.nf +++ b/modules/homer/maketagdirectory/main.nf @@ -14,8 +14,9 @@ process HOMER_MAKETAGDIRECTORY { path fasta output: - tuple val(meta), path("tag_dir"), emit: tagdir - path "versions.yml" , emit: versions + tuple val(meta), path("*_tagdir") , emit: tagdir + tuple val(meta), path("*_tagdir/tagInfo.txt"), emit: taginfo + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -25,7 +26,7 @@ process HOMER_MAKETAGDIRECTORY { def prefix = task.ext.prefix ?: "${meta.id}" """ makeTagDirectory \\ - tag_dir \\ + ${prefix}_tagdir \\ -genome $fasta \\ $args \\ $bam 
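The collision fix above works because each sample's tag directory now carries the task prefix, which falls back to the sample id from the meta map, so two samples no longer write to the same fixed tag_dir path. A rough plain-Groovy illustration of how the directory name resolves, using a simplified meta map modelled on the meta test further below (leaving task.ext.prefix unset is an assumption):

// Sketch only: how the new tag-directory name resolves when no explicit prefix is configured.
def meta   = [ id: 'meta_test' ]                             // simplified meta map
def ext    = [ prefix: null ]                                // task.ext.prefix assumed unset
def prefix = ext.prefix ?: "${meta.id}"                      // -> 'meta_test'
assert "${prefix}_tagdir".toString() == 'meta_test_tagdir'   // matches the updated paths in the test.yml below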
diff --git a/modules/homer/maketagdirectory/meta.yml b/modules/homer/maketagdirectory/meta.yml index 31d59ee7..ccd2d6a8 100644 --- a/modules/homer/maketagdirectory/meta.yml +++ b/modules/homer/maketagdirectory/meta.yml @@ -57,10 +57,14 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - tag_dir: + - tagdir: type: directory description: The "Tag Directory" - pattern: "tag_dir" + pattern: "*_tagdir" + - taginfo: + type: directory + description: The tagInfo.txt included to ensure there's proper output + pattern: "*_tagdir/tagInfo.txt" - versions: type: file description: File containing software versions diff --git a/tests/modules/homer/findpeaks/test.yml b/tests/modules/homer/findpeaks/test.yml index 75e94529..b5225404 100644 --- a/tests/modules/homer/findpeaks/test.yml +++ b/tests/modules/homer/findpeaks/test.yml @@ -5,4 +5,4 @@ - homer/findpeaks files: - path: output/homer/test.peaks.txt - md5sum: f75ac1fea67f1e307a1ad4d059a9b6cc + md5sum: 86e15beaa4b439585786478e58418c0c diff --git a/tests/modules/homer/maketagdirectory/test.yml b/tests/modules/homer/maketagdirectory/test.yml index 28e74c32..970ca906 100644 --- a/tests/modules/homer/maketagdirectory/test.yml +++ b/tests/modules/homer/maketagdirectory/test.yml @@ -4,15 +4,15 @@ - homer - homer/maketagdirectory files: - - path: output/homer/tag_dir/MT192765.1.tags.tsv + - path: output/homer/test_tagdir/MT192765.1.tags.tsv md5sum: e29522171ca2169b57396495f8b97485 - - path: output/homer/tag_dir/tagAutocorrelation.txt + - path: output/homer/test_tagdir/tagAutocorrelation.txt md5sum: 62b107c4971b94126fb89a0bc2800455 - - path: output/homer/tag_dir/tagCountDistribution.txt + - path: output/homer/test_tagdir/tagCountDistribution.txt md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 - - path: output/homer/tag_dir/tagInfo.txt - md5sum: ff56f30411b221b847aa4e6e9a6098a1 - - path: output/homer/tag_dir/tagLengthDistribution.txt + - path: output/homer/test_tagdir/tagInfo.txt + md5sum: c9bb2ca53bb101d74c1ec92d2b0ad26e + - path: output/homer/test_tagdir/tagLengthDistribution.txt md5sum: e5aa2b9843ca9c04ace297280aed6af4 - name: homer maketagdirectory meta @@ -21,15 +21,15 @@ - homer - homer/maketagdirectory files: - - path: output/homer/tag_dir/MT192765.1.tags.tsv + - path: output/homer/meta_test_tagdir/MT192765.1.tags.tsv md5sum: e29522171ca2169b57396495f8b97485 - - path: output/homer/tag_dir/tagAutocorrelation.txt + - path: output/homer/meta_test_tagdir/tagAutocorrelation.txt md5sum: 62b107c4971b94126fb89a0bc2800455 - - path: output/homer/tag_dir/tagCountDistribution.txt + - path: output/homer/meta_test_tagdir/tagCountDistribution.txt md5sum: fd4ee7ce7c5dfd7c9d739534b8180578 - - path: output/homer/tag_dir/tagInfo.txt - md5sum: ff56f30411b221b847aa4e6e9a6098a1 - - path: output/homer/tag_dir/tagLengthDistribution.txt + - path: output/homer/meta_test_tagdir/tagInfo.txt + md5sum: cb907ebf9afc042bb61196d624e793c8 + - path: output/homer/meta_test_tagdir/tagLengthDistribution.txt md5sum: e5aa2b9843ca9c04ace297280aed6af4 - name: homer maketagdirectory bam @@ -38,13 +38,13 @@ - homer - homer/maketagdirectory files: - - path: output/homer/tag_dir/MT192765.1.tags.tsv + - path: output/homer/test_tagdir/MT192765.1.tags.tsv md5sum: 365808c4751ef6dd7085ac52037a22bc - - path: output/homer/tag_dir/tagAutocorrelation.txt + - path: output/homer/test_tagdir/tagAutocorrelation.txt md5sum: 8b396f2aef1cdd3af4fab57b142d3250 - - path: output/homer/tag_dir/tagCountDistribution.txt + - path: 
output/homer/test_tagdir/tagCountDistribution.txt md5sum: afc6d007096c3872bbe84c9dc8edb832 - - path: output/homer/tag_dir/tagInfo.txt - md5sum: fbaf46eeb8a0723fa8b5eabd93f9d821 - - path: output/homer/tag_dir/tagLengthDistribution.txt + - path: output/homer/test_tagdir/tagInfo.txt + md5sum: aebf6ff15fd0a238ee6a94d623c578ca + - path: output/homer/test_tagdir/tagLengthDistribution.txt md5sum: 44f231adb2a705ae81950808c55cf248 diff --git a/tests/modules/homer/pos2bed/test.yml b/tests/modules/homer/pos2bed/test.yml index 525d61c8..6aad55bc 100644 --- a/tests/modules/homer/pos2bed/test.yml +++ b/tests/modules/homer/pos2bed/test.yml @@ -4,7 +4,7 @@ - "homer" - "homer/pos2bed" files: - - path: "output/homer/test.bed" - md5sum: 0b9ebd8f06b9c820a551fbdb2d7635ee + - path: output/homer/test.bed + md5sum: 5d6ddd9c7e621a66f6f045b9b5abecb4 - path: output/homer/versions.yml md5sum: 1485f4b2d76484e8fe3310e2505de2fd From cc671a5f3c7a063ca3f84666ecbfbe3547d71a12 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Sun, 27 Mar 2022 14:15:43 +0200 Subject: [PATCH 075/283] Update svdb/merge (#1449) * fix error * fix input string --- modules/svdb/merge/main.nf | 1 + 1 file changed, 1 insertion(+) diff --git a/modules/svdb/merge/main.nf b/modules/svdb/merge/main.nf index 58aef652..9e729bf3 100644 --- a/modules/svdb/merge/main.nf +++ b/modules/svdb/merge/main.nf @@ -25,6 +25,7 @@ process SVDB_MERGE { def prio = "" if(priority) { prio = "--priority ${priority.join(',')}" + input = "" for (int index = 0; index < vcfs.size(); index++) { input += " ${vcfs[index]}:${priority[index]}" } From 1f483d9203a61e17259332f1eee5502d6a51cb61 Mon Sep 17 00:00:00 2001 From: Tanja Normark <35598351+talnor@users.noreply.github.com> Date: Mon, 28 Mar 2022 14:28:50 +0200 Subject: [PATCH 076/283] Add kaiju_kaiju module (#1448) * Added kaiju_kaiju module * Update modules/kaiju/kaiju/main.nf Co-authored-by: James A. Fellows Yates * Update modules/kaiju/kaiju/main.nf Co-authored-by: James A. Fellows Yates * Update modules/kaiju/kaiju/meta.yml Co-authored-by: James A. Fellows Yates * Update keywords Co-authored-by: James A. Fellows Yates * Update output file naming * Update output file naming * update spacing for lint * Update input file patterns Co-authored-by: James A. Fellows Yates Co-authored-by: James A. Fellows Yates --- modules/kaiju/kaiju/main.nf | 39 +++++++++++++++++ modules/kaiju/kaiju/meta.yml | 52 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 4 ++ tests/modules/kaiju/kaiju/main.nf | 34 +++++++++++++++ tests/modules/kaiju/kaiju/nextflow.config | 5 +++ tests/modules/kaiju/kaiju/test.yml | 21 +++++++++ 7 files changed, 159 insertions(+) create mode 100644 modules/kaiju/kaiju/main.nf create mode 100644 modules/kaiju/kaiju/meta.yml create mode 100644 tests/modules/kaiju/kaiju/main.nf create mode 100644 tests/modules/kaiju/kaiju/nextflow.config create mode 100644 tests/modules/kaiju/kaiju/test.yml diff --git a/modules/kaiju/kaiju/main.nf b/modules/kaiju/kaiju/main.nf new file mode 100644 index 00000000..4050ede5 --- /dev/null +++ b/modules/kaiju/kaiju/main.nf @@ -0,0 +1,39 @@ +process KAIJU_KAIJU { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kaiju:1.8.2--h5b5514e_1': + 'quay.io/biocontainers/kaiju:1.8.2--h5b5514e_1' }" + + input: + tuple val(meta), path(reads) + tuple path(db), path(dbnodes) + + output: + tuple val(meta), path('*.tsv'), emit: results + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def input = meta.single_end ? "-i ${reads}" : "-i ${reads[0]} -j ${reads[1]}" + """ + kaiju \\ + $args \\ + -z $task.cpus \\ + -t ${dbnodes} \\ + -f ${db} \\ + -o ${prefix}.tsv \\ + $input + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + kaiju: \$(echo \$( kaiju -h 2>&1 | sed -n 1p | sed 's/^.*Kaiju //' )) + END_VERSIONS + """ +} diff --git a/modules/kaiju/kaiju/meta.yml b/modules/kaiju/kaiju/meta.yml new file mode 100644 index 00000000..69a74037 --- /dev/null +++ b/modules/kaiju/kaiju/meta.yml @@ -0,0 +1,52 @@ +name: kaiju_kaiju +description: Taxonomic classification of metagenomic sequence data using a protein reference database +keywords: + - classify + - metagenomics + - fastq + - taxonomic profiling +tools: + - kaiju: + description: Fast and sensitive taxonomic classification for metagenomics + homepage: https://kaiju.binf.ku.dk/ + documentation: https://github.com/bioinformatics-centre/kaiju/blob/master/README.md + tool_dev_url: https://github.com/bioinformatics-centre/kaiju + doi: "10.1038/ncomms11257" + licence: ["GNU GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input fastq/fasta files of size 1 and 2 for single-end and paired-end data, + respectively. + pattern: "*.{fastq,fq,fasta,fa,fsa,fas,fna,fastq.gz,fq.gz,fasta.gz,fa.gz,fsa.gz,fas.gz,fna.gz}" + - db: + type: files + description: | + List containing the database and nodes files for Kaiju + e.g. [ 'database.fmi', 'nodes.dmp' ] + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: file + description: Results with taxonomic classification of each read + pattern: "*.tsv" + +authors: + - "@talnor" + - "@sofstam" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a6e2067f..2a7b2f70 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -961,6 +961,10 @@ jupyternotebook: - modules/jupyternotebook/** - tests/modules/jupyternotebook/** +kaiju/kaiju: + - modules/kaiju/kaiju/** + - tests/modules/kaiju/kaiju/** + kallisto/index: - modules/kallisto/index/** - tests/modules/kallisto/index/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 230e8d43..836604b8 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -28,6 +28,10 @@ params { kraken2_bracken = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken" kraken2_bracken_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken.tar.gz" + kaiju_fmi = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/proteins.fmi" + kaiju_nodes = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/nodes.dmp" + kaiju_names = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/names.dmp" + ncbi_taxmap_zip = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/ncbi_taxmap.zip" taxon_list_txt = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/taxon_list.txt" diff --git a/tests/modules/kaiju/kaiju/main.nf b/tests/modules/kaiju/kaiju/main.nf new file mode 100644 index 00000000..00da82a9 --- /dev/null +++ b/tests/modules/kaiju/kaiju/main.nf @@ -0,0 +1,34 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KAIJU_KAIJU } from '../../../../modules/kaiju/kaiju/main.nf' + +workflow test_kaiju_kaiju_single_end { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + db = [ + file(params.test_data['sarscov2']['genome']['kaiju_fmi'], checkIfExists: true), // database + file(params.test_data['sarscov2']['genome']['kaiju_nodes'], checkIfExists: true) // taxon nodes + ] + + KAIJU_KAIJU ( input, db ) +} + +workflow test_kaiju_kaiju_paired_end { + + input = [ + [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + db = [ + file(params.test_data['sarscov2']['genome']['kaiju_fmi'], checkIfExists: true), // database + file(params.test_data['sarscov2']['genome']['kaiju_nodes'], checkIfExists: true) // taxon nodes + ] + + KAIJU_KAIJU ( input, db ) +} diff --git a/tests/modules/kaiju/kaiju/nextflow.config b/tests/modules/kaiju/kaiju/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/kaiju/kaiju/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/kaiju/kaiju/test.yml b/tests/modules/kaiju/kaiju/test.yml new file mode 100644 index 00000000..72eb64c5 --- /dev/null +++ b/tests/modules/kaiju/kaiju/test.yml @@ -0,0 +1,21 @@ +- name: kaiju kaiju test_kaiju_kaiju_single_end + command: nextflow run tests/modules/kaiju/kaiju -entry test_kaiju_kaiju_single_end -c 
tests/config/nextflow.config + tags: + - kaiju/kaiju + - kaiju + files: + - path: output/kaiju/test.tsv + contains: ["C\tERR5069949.2257580\t2697049"] + - path: output/kaiju/versions.yml + md5sum: 7e218c0ea00a71dd3a5ec5aaf28804f4 + +- name: kaiju kaiju test_kaiju_kaiju_paired_end + command: nextflow run tests/modules/kaiju/kaiju -entry test_kaiju_kaiju_paired_end -c tests/config/nextflow.config + tags: + - kaiju/kaiju + - kaiju + files: + - path: output/kaiju/test.tsv + contains: ["C\tERR5069949.2257580\t2697049"] + - path: output/kaiju/versions.yml + md5sum: a74215f6f69979ae046fb1d65c56ac67 From 240ee4328cffce14e983f6871cc0ea39cf7a5041 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 28 Mar 2022 16:29:46 +0200 Subject: [PATCH 077/283] Dastool update to allow non-gzipped inputs (#1458) * fix: remove left-over unnecessary code * Make gzipping optional for DAS_Tool scaffolds2bin * Add optional unzipping * Make gunzip optional for DAS_Tool scaffolds2bin * Apply suggestions from code review Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Update modules/dastool/scaffolds2bin/meta.yml Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/dastool/scaffolds2bin/main.nf | 5 ++-- modules/dastool/scaffolds2bin/meta.yml | 9 ++---- tests/modules/dastool/scaffolds2bin/main.nf | 31 +++++++++++++++++--- tests/modules/dastool/scaffolds2bin/test.yml | 20 ++++++++----- 4 files changed, 46 insertions(+), 19 deletions(-) diff --git a/modules/dastool/scaffolds2bin/main.nf b/modules/dastool/scaffolds2bin/main.nf index 6e9fcdc2..365872fd 100644 --- a/modules/dastool/scaffolds2bin/main.nf +++ b/modules/dastool/scaffolds2bin/main.nf @@ -22,9 +22,10 @@ process DASTOOL_SCAFFOLDS2BIN { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def file_extension = extension ? extension : "fasta" - + def clean_fasta = fasta.toString() - ".gz" + def decompress_fasta = fasta.toString() == clean_fasta ? "" : "gunzip -q -f $fasta" """ - gunzip -f *.${file_extension}.gz + $decompress_fasta Fasta_to_Scaffolds2Bin.sh \\ $args \\ diff --git a/modules/dastool/scaffolds2bin/meta.yml b/modules/dastool/scaffolds2bin/meta.yml index 0bf8618d..823084b3 100644 --- a/modules/dastool/scaffolds2bin/meta.yml +++ b/modules/dastool/scaffolds2bin/meta.yml @@ -30,14 +30,11 @@ input: e.g. [ id:'test', single_end:false ] - fasta: type: file - description: Fasta of list of fasta files recommended to be gathered via with .collect() of bins - pattern: "*.{fa,fas,fasta}" - - binner: - type: val - description: Name of the binning software (optional) + description: Fasta or list of fasta files recommended to be gathered via with .collect() of bins + pattern: "*.{fa,fa.gz,fas,fas.gz,fna,fna.gz,fasta,fasta.gz}" - extension: type: val - description: Fasta file extension (fa | fas | fasta | ...) + description: Fasta file extension (fa | fas | fasta | ...), but without .gz suffix, even if gzipped input. 
output: - meta: diff --git a/tests/modules/dastool/scaffolds2bin/main.nf b/tests/modules/dastool/scaffolds2bin/main.nf index a0cd6726..c45a6f2b 100644 --- a/tests/modules/dastool/scaffolds2bin/main.nf +++ b/tests/modules/dastool/scaffolds2bin/main.nf @@ -2,9 +2,10 @@ nextflow.enable.dsl = 2 -include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' -include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' +include { GUNZIP } from '../../../../modules/gunzip/main.nf' +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' workflow test_dastool_scaffolds2bin { @@ -22,4 +23,26 @@ workflow test_dastool_scaffolds2bin { METABAT2_METABAT2 ( input_metabat2 ) DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") -} \ No newline at end of file +} + +workflow test_dastool_scaffolds2bin_ungzipped { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + // TODO test unzipped input files + ch_input_2_scaffolds2bin = GUNZIP( METABAT2_METABAT2.out.fasta ).gunzip + + DASTOOL_SCAFFOLDS2BIN ( ch_input_2_scaffolds2bin, "fa") +} diff --git a/tests/modules/dastool/scaffolds2bin/test.yml b/tests/modules/dastool/scaffolds2bin/test.yml index 26f528c9..3d91c8c1 100644 --- a/tests/modules/dastool/scaffolds2bin/test.yml +++ b/tests/modules/dastool/scaffolds2bin/test.yml @@ -1,14 +1,20 @@ - name: dastool scaffolds2bin test_dastool_scaffolds2bin - command: nextflow run ./tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c ./tests/config/nextflow.config -c ./tests/modules/dastool/scaffolds2bin/nextflow.config + command: nextflow run tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin -c tests/config/nextflow.config tags: - dastool - dastool/scaffolds2bin files: - path: output/dastool/test.tsv md5sum: 6e46c0be14dded7cb13af38f54feea47 - - path: output/metabat2/bins/test.1.fa.gz - md5sum: 2b297bf557cc3831b800348859331268 - - path: output/metabat2/test.tsv.gz - md5sum: 619338fa5019e361d5545ce385a6961f - - path: output/metabat2/test.txt.gz - md5sum: 745a0446af6ef68b930975e9ce5a95d6 + - path: output/dastool/versions.yml + md5sum: d0831ed159eb5a1a1565d1d211012ad6 +- name: dastool scaffolds2bin test_dastool_scaffolds2bin_ungzipped + command: nextflow run tests/modules/dastool/scaffolds2bin -entry test_dastool_scaffolds2bin_ungzipped -c tests/config/nextflow.config + tags: + - dastool + - dastool/scaffolds2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/versions.yml + md5sum: da58e477b7f4c16a9ea495ec1a4a4d4f 
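The optional decompression added to dastool/scaffolds2bin above relies on Groovy string subtraction: stripping a trailing ".gz" and comparing the result with the original name decides whether a gunzip command is emitted at all. A minimal plain-Groovy sketch of that pattern follows; the file name is only an illustrative example, not taken from the patch.

// Sketch of the optional-gunzip pattern from the scaffolds2bin change above (hypothetical input name).
def fasta            = 'test.1.fa.gz'                        // assumed gzipped bin
def clean_fasta      = fasta.toString() - '.gz'              // 'test.1.fa'; unchanged when the input is not gzipped
def decompress_fasta = fasta.toString() == clean_fasta ? '' : "gunzip -q -f $fasta".toString()
assert clean_fasta      == 'test.1.fa'
assert decompress_fasta == 'gunzip -q -f test.1.fa.gz'       // would stay '' for an already-decompressed bin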
From 49b18b1639f4f7104187058866a8fab33332bdfe Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Mon, 28 Mar 2022 17:48:39 +0200 Subject: [PATCH 078/283] add stub to modules used in raredisease pipeline (#1206) * add stub section * add stub for bcftools norm * add stub to more modules * fix expansionhunter tests * revert changes -picard * Update stub to write version no.s to a file * add picard * revert picard again * add stubs to more modules * fix bwamem2 * add bcftools view * add stubs * fix svdb query * review suggestions --- modules/bcftools/norm/main.nf | 11 +++ modules/bcftools/view/main.nf | 11 +++ modules/bwamem2/index/main.nf | 15 ++++ modules/bwamem2/mem/main.nf | 11 +++ modules/cat/cat/main.nf | 10 +++ modules/deepvariant/main.nf | 12 ++++ modules/expansionhunter/main.nf | 11 +++ modules/fastqc/main.nf | 12 ++++ modules/gatk4/bedtointervallist/main.nf | 11 +++ .../gatk4/createsequencedictionary/main.nf | 11 +++ modules/gatk4/intervallisttools/main.nf | 18 +++++ modules/glnexus/main.nf | 11 +++ modules/multiqc/main.nf | 12 ++++ modules/picard/collecthsmetrics/main.nf | 11 +++ modules/picard/collectmultiplemetrics/main.nf | 20 ++++++ modules/picard/markduplicates/main.nf | 13 ++++ modules/picard/sortvcf/main.nf | 13 ++++ modules/qualimap/bamqc/main.nf | 68 +++++++++++++++++++ modules/samtools/faidx/main.nf | 10 +++ modules/samtools/index/main.nf | 12 ++++ modules/samtools/merge/main.nf | 12 ++++ modules/samtools/sort/main.nf | 11 +++ modules/samtools/stats/main.nf | 11 +++ modules/svdb/query/main.nf | 1 - modules/tabix/bgziptabix/main.nf | 12 ++++ modules/tabix/tabix/main.nf | 11 +++ modules/tiddit/cov/main.nf | 12 ++++ modules/tiddit/sv/main.nf | 13 ++++ modules/ucsc/wigtobigwig/main.nf | 11 +++ modules/untar/main.nf | 11 +++ modules/vcfanno/main.nf | 11 +++ tests/modules/expansionhunter/main.nf | 4 +- tests/modules/expansionhunter/test.yml | 2 +- 33 files changed, 421 insertions(+), 4 deletions(-) diff --git a/modules/bcftools/norm/main.nf b/modules/bcftools/norm/main.nf index cd681f21..b81a4310 100644 --- a/modules/bcftools/norm/main.nf +++ b/modules/bcftools/norm/main.nf @@ -34,4 +34,15 @@ process BCFTOOLS_NORM { bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/bcftools/view/main.nf b/modules/bcftools/view/main.nf index 2a240f4a..ca1121a5 100644 --- a/modules/bcftools/view/main.nf +++ b/modules/bcftools/view/main.nf @@ -41,4 +41,15 @@ process BCFTOOLS_VIEW { bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bcftools: \$(bcftools --version 2>&1 | head -n1 | sed 's/^.*bcftools //; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/bwamem2/index/main.nf b/modules/bwamem2/index/main.nf index 0e9cc2f8..900f27d4 100644 --- a/modules/bwamem2/index/main.nf +++ b/modules/bwamem2/index/main.nf @@ -31,4 +31,19 @@ process BWAMEM2_INDEX { bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') END_VERSIONS """ + + stub: + """ + mkdir bwamem2 + touch bwamem2/${fasta}.0123 + touch bwamem2/${fasta}.ann + 
touch bwamem2/${fasta}.pac + touch bwamem2/${fasta}.amb + touch bwamem2/${fasta}.bwt.2bit.64 + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + END_VERSIONS + """ } diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 21dfb1d6..e3a3d164 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -43,4 +43,15 @@ process BWAMEM2_MEM { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bam + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bwamem2: \$(echo \$(bwa-mem2 version 2>&1) | sed 's/.* //') + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 25dcc652..b02ca7ee 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -47,4 +47,14 @@ process CAT_CAT { pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) END_VERSIONS """ + + stub: + """ + touch $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ } diff --git a/modules/deepvariant/main.nf b/modules/deepvariant/main.nf index 8e5f10df..f0c6b47b 100644 --- a/modules/deepvariant/main.nf +++ b/modules/deepvariant/main.nf @@ -44,4 +44,16 @@ process DEEPVARIANT { deepvariant: \$(echo \$(/opt/deepvariant/bin/run_deepvariant --version) | sed 's/^.*version //; s/ .*\$//' ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf.gz + touch ${prefix}.g.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + deepvariant: \$(echo \$(/opt/deepvariant/bin/run_deepvariant --version) | sed 's/^.*version //; s/ .*\$//' ) + END_VERSIONS + """ } diff --git a/modules/expansionhunter/main.nf b/modules/expansionhunter/main.nf index 4e62b2a6..f60b75b4 100644 --- a/modules/expansionhunter/main.nf +++ b/modules/expansionhunter/main.nf @@ -37,4 +37,15 @@ process EXPANSIONHUNTER { expansionhunter: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + expansionhunter: \$( echo \$(ExpansionHunter --version 2>&1) | sed 's/^.*ExpansionHunter v//') + END_VERSIONS + """ } diff --git a/modules/fastqc/main.nf b/modules/fastqc/main.nf index ed6b8c50..05730368 100644 --- a/modules/fastqc/main.nf +++ b/modules/fastqc/main.nf @@ -44,4 +44,16 @@ process FASTQC { END_VERSIONS """ } + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.html + touch ${prefix}.zip + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + fastqc: \$( fastqc --version | sed -e "s/FastQC v//g" ) + END_VERSIONS + """ } diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index 74256dd1..c3b624a8 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -39,4 +39,15 @@ process GATK4_BEDTOINTERVALLIST { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.interval_list + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk 
--version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index 87d52a59..fe1a6c65 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -37,4 +37,15 @@ process GATK4_CREATESEQUENCEDICTIONARY { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.dict + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 352a3240..82c3222c 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -51,4 +51,22 @@ process GATK4_INTERVALLISTTOOLS { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + mkdir -p ${prefix}_split/temp_0001_of_6 + mkdir -p ${prefix}_split/temp_0002_of_6 + mkdir -p ${prefix}_split/temp_0003_of_6 + mkdir -p ${prefix}_split/temp_0004_of_6 + touch ${prefix}_split/temp_0001_of_6/1scattered.interval_list + touch ${prefix}_split/temp_0002_of_6/2scattered.interval_list + touch ${prefix}_split/temp_0003_of_6/3scattered.interval_list + touch ${prefix}_split/temp_0004_of_6/4scattered.interval_list + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/glnexus/main.nf b/modules/glnexus/main.nf index 84da95a0..a26ab4ce 100644 --- a/modules/glnexus/main.nf +++ b/modules/glnexus/main.nf @@ -42,4 +42,15 @@ process GLNEXUS { glnexus: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + glnexus: \$( echo \$(glnexus_cli 2>&1) | head -n 1 | sed 's/^.*release v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/multiqc/main.nf b/modules/multiqc/main.nf index 1264aac1..ae019dbf 100644 --- a/modules/multiqc/main.nf +++ b/modules/multiqc/main.nf @@ -28,4 +28,16 @@ process MULTIQC { multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) END_VERSIONS """ + + stub: + """ + touch multiqc_data + touch multiqc_plots + touch multiqc_report.html + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" ) + END_VERSIONS + """ } diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 07e8504f..3acf8bb8 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -48,4 +48,15 @@ process PICARD_COLLECTHSMETRICS { picard: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_collecthsmetrics.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(echo \$(picard CollectHsMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ } diff --git a/modules/picard/collectmultiplemetrics/main.nf 
b/modules/picard/collectmultiplemetrics/main.nf index e023ea3c..340463a8 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -42,4 +42,24 @@ process PICARD_COLLECTMULTIPLEMETRICS { picard: \$(picard CollectMultipleMetrics --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.CollectMultipleMetrics.alignment_summary_metrics + touch ${prefix}.CollectMultipleMetrics.insert_size_metrics + touch ${prefix}.CollectMultipleMetrics.quality_distribution.pdf + touch ${prefix}.CollectMultipleMetrics.base_distribution_by_cycle_metrics + touch ${prefix}.CollectMultipleMetrics.quality_by_cycle_metrics + touch ${prefix}.CollectMultipleMetrics.read_length_histogram.pdf + touch ${prefix}.CollectMultipleMetrics.base_distribution_by_cycle.pdf + touch ${prefix}.CollectMultipleMetrics.quality_by_cycle.pdf + touch ${prefix}.CollectMultipleMetrics.insert_size_histogram.pdf + touch ${prefix}.CollectMultipleMetrics.quality_distribution_metrics + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(echo \$(picard CollectMultipleMetrics --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ } diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 5196b6ed..e754a587 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -42,4 +42,17 @@ process PICARD_MARKDUPLICATES { picard: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bam + touch ${prefix}.bam.bai + touch ${prefix}.MarkDuplicates.metrics.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(echo \$(picard MarkDuplicates --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ } diff --git a/modules/picard/sortvcf/main.nf b/modules/picard/sortvcf/main.nf index 0f10c1ab..4047545e 100644 --- a/modules/picard/sortvcf/main.nf +++ b/modules/picard/sortvcf/main.nf @@ -46,4 +46,17 @@ process PICARD_SORTVCF { picard: \$(picard SortVcf --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_sorted.vcf.gz + touch ${prefix}.bam.bai + touch ${prefix}.MarkDuplicates.metrics.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(picard SortVcf --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d:) + END_VERSIONS + """ } diff --git a/modules/qualimap/bamqc/main.nf b/modules/qualimap/bamqc/main.nf index 92f38f8c..3bfcb4c1 100644 --- a/modules/qualimap/bamqc/main.nf +++ b/modules/qualimap/bamqc/main.nf @@ -52,4 +52,72 @@ process QUALIMAP_BAMQC { qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') END_VERSIONS """ + + stub: + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + """ + mkdir -p $prefix/css + mkdir $prefix/images_qualimapReport + mkdir $prefix/raw_data_qualimapReport + cd $prefix/css + touch agogo.css + touch basic.css + touch bgtop.png + touch comment-close.png + touch doctools.js + touch down-pressed.png + touch jquery.js + touch plus.png + touch qualimap_logo_small.png + touch searchtools.js + touch up.png + touch websupport.js + touch ajax-loader.gif + touch bgfooter.png + touch comment-bright.png + touch comment.png + touch down.png + touch file.png + touch minus.png + touch pygments.css + touch report.css + touch underscore.js + touch up-pressed.png + cd ../images_qualimapReport/ + touch genome_coverage_0to50_histogram.png + touch genome_coverage_quotes.png + touch genome_insert_size_across_reference.png + touch genome_mapping_quality_histogram.png + touch genome_uniq_read_starts_histogram.png + touch genome_coverage_across_reference.png + touch genome_gc_content_per_window.png + touch genome_insert_size_histogram.png + touch genome_reads_clipping_profile.png + touch genome_coverage_histogram.png + touch genome_homopolymer_indels.png + touch genome_mapping_quality_across_reference.png + touch genome_reads_content_per_read_position.png + cd ../raw_data_qualimapReport + touch coverage_across_reference.txt + touch genome_fraction_coverage.txt + touch insert_size_histogram.txt + touch mapped_reads_nucleotide_content.txt + touch coverage_histogram.txt + touch homopolymer_indels.txt + touch mapped_reads_clipping_profile.txt + touch mapping_quality_across_reference.txt + touch duplication_rate_histogram.txt + touch insert_size_across_reference.txt + touch mapped_reads_gc-content_distribution.txt + touch mapping_quality_histogram.txt + cd ../ + touch genome_results.txt + touch qualimapReport.html + cd ../ + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + qualimap: \$(echo \$(qualimap 2>&1) | sed 's/^.*QualiMap v.//; s/Built.*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 7732a4ec..053279ff 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -29,4 +29,14 @@ process SAMTOOLS_FAIDX { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + """ + touch ${fasta}.fai + cat <<-END_VERSIONS > versions.yml + + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index e41cdcc8..fff6e1b8 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -33,4 +33,16 @@ process SAMTOOLS_INDEX { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + """ + touch ${input}.bai + touch ${input}.crai + touch ${input}.csi + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 7b771677..9f962a4b 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -38,4 +38,16 @@ process SAMTOOLS_MERGE { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + prefix = task.ext.suffix ? 
"${meta.id}${task.ext.suffix}" : "${meta.id}" + def file_type = input_files[0].getExtension() + """ + touch ${prefix}.${file_type} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index 0e2de8ba..ba46f0c9 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -28,4 +28,15 @@ process SAMTOOLS_SORT { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 6efc9d9a..85cb64f3 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -34,4 +34,15 @@ process SAMTOOLS_STATS { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${input}.stats + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } diff --git a/modules/svdb/query/main.nf b/modules/svdb/query/main.nf index 37ce432c..15609bad 100644 --- a/modules/svdb/query/main.nf +++ b/modules/svdb/query/main.nf @@ -70,5 +70,4 @@ process SVDB_QUERY { svdb: \$( echo \$(svdb) | head -1 | sed 's/usage: SVDB-\\([0-9]\\.[0-9]\\.[0-9]\\).*/\\1/' ) END_VERSIONS """ - } diff --git a/modules/tabix/bgziptabix/main.nf b/modules/tabix/bgziptabix/main.nf index 12657599..77fd91a5 100644 --- a/modules/tabix/bgziptabix/main.nf +++ b/modules/tabix/bgziptabix/main.nf @@ -30,4 +30,16 @@ process TABIX_BGZIPTABIX { tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.gz + touch ${prefix}.gz.tbi + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index 5f516261..c9dab068 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -27,4 +27,15 @@ process TABIX_TABIX { tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${tab}.tbi + cat <<-END_VERSIONS > versions.yml + + "${task.process}": + tabix: \$(echo \$(tabix -h 2>&1) | sed 's/^.*Version: //; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index a5f8c649..f805cfa7 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -36,4 +36,16 @@ process TIDDIT_COV { tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch $prefix.wig + touch $prefix.tab + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index 
454dfc54..872eeed1 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -38,4 +38,17 @@ process TIDDIT_SV { tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch $prefix.vcf + touch $prefix.ploidy.tab + touch $prefix.signals.tab + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + tiddit: \$(echo \$(tiddit 2>&1) | sed 's/^.*TIDDIT-//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/ucsc/wigtobigwig/main.nf b/modules/ucsc/wigtobigwig/main.nf index d07e7ec1..2af7190b 100644 --- a/modules/ucsc/wigtobigwig/main.nf +++ b/modules/ucsc/wigtobigwig/main.nf @@ -35,4 +35,15 @@ process UCSC_WIGTOBIGWIG { ucsc: $VERSION END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bw + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + ucsc: $VERSION + END_VERSIONS + """ } diff --git a/modules/untar/main.nf b/modules/untar/main.nf index dc43fb78..5aa6aa7f 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -33,4 +33,15 @@ process UNTAR { untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') END_VERSIONS """ + + stub: + untar = archive.toString() - '.tar.gz' + """ + touch $untar + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + untar: \$(echo \$(tar --version 2>&1) | sed 's/^.*(GNU tar) //; s/ Copyright.*\$//') + END_VERSIONS + """ } diff --git a/modules/vcfanno/main.nf b/modules/vcfanno/main.nf index 51b1ec5b..bc0514c9 100644 --- a/modules/vcfanno/main.nf +++ b/modules/vcfanno/main.nf @@ -39,4 +39,15 @@ process VCFANNO { vcfanno: \$(echo \$(vcfanno 2>&1 | grep version | cut -f3 -d' ' )) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_annotated.vcf + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + vcfanno: \$(echo \$(vcfanno 2>&1 | grep version | cut -f3 -d' ' )) + END_VERSIONS + """ } diff --git a/tests/modules/expansionhunter/main.nf b/tests/modules/expansionhunter/main.nf index 91faeeb8..d0221234 100644 --- a/tests/modules/expansionhunter/main.nf +++ b/tests/modules/expansionhunter/main.nf @@ -7,8 +7,8 @@ include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' workflow test_expansionhunter { input = [ [ id:'test', gender:'male' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) variant_catalog = file(params.test_data['homo_sapiens']['genome']['repeat_expansions'], checkIfExists: true) diff --git a/tests/modules/expansionhunter/test.yml b/tests/modules/expansionhunter/test.yml index 19403588..f9282f8c 100644 --- a/tests/modules/expansionhunter/test.yml +++ b/tests/modules/expansionhunter/test.yml @@ -4,4 +4,4 @@ - expansionhunter files: - path: output/expansionhunter/test.vcf - md5sum: ef6c2101d7bd67211bb5a5a132690e02 + md5sum: cfd4a1d35c0e469b99eb6aaa6d22de76 From dbb46c9b635080b132bab4b8d5b9a14f0d1c22e7 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Tue, 29 Mar 
2022 16:44:19 +0200 Subject: [PATCH 079/283] Bump chromap version 0.2.1 (#1462) --- modules/chromap/chromap/main.nf | 6 +++--- modules/chromap/index/main.nf | 8 ++++---- tests/modules/chromap/chromap/test.yml | 8 ++++---- tests/modules/chromap/index/test.yml | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index 4ee86b92..bf3d1234 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -2,10 +2,10 @@ process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::chromap=0.2.0 bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::chromap=0.2.1 bioconda::samtools=1.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:ed3529ef5253d7ccbc688b6a4c5c447152685757-0' : - 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:ed3529ef5253d7ccbc688b6a4c5c447152685757-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:bd74d08a359024829a7aec1638a28607bbcd8a58-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:bd74d08a359024829a7aec1638a28607bbcd8a58-0' }" input: tuple val(meta), path(reads) diff --git a/modules/chromap/index/main.nf b/modules/chromap/index/main.nf index 2696d6a5..ee370695 100644 --- a/modules/chromap/index/main.nf +++ b/modules/chromap/index/main.nf @@ -1,11 +1,11 @@ process CHROMAP_INDEX { - tag '$fasta' + tag "$fasta" label 'process_medium' - conda (params.enable_conda ? "bioconda::chromap=0.2.0" : null) + conda (params.enable_conda ? "bioconda::chromap=0.2.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/chromap:0.2.0--hd03093a_1' : - 'quay.io/biocontainers/chromap:0.2.0--hd03093a_1' }" + 'https://depot.galaxyproject.org/singularity/chromap:0.2.1--hd03093a_0' : + 'quay.io/biocontainers/chromap:0.2.1--hd03093a_0' }" input: path fasta diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index 40e45959..d76370b2 100644 --- a/tests/modules/chromap/chromap/test.yml +++ b/tests/modules/chromap/chromap/test.yml @@ -8,7 +8,7 @@ - path: output/chromap/test.bed.gz md5sum: 25e40bde24c7b447292cd68573728694 - path: output/chromap/versions.yml - md5sum: 2d3d2959ac20d98036807964896829e7 + md5sum: d24cfc35ad958206a5bc5694221b4fae - name: chromap chromap test_chromap_chromap_paired_end command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -20,7 +20,7 @@ - path: output/chromap/test.bed.gz md5sum: 7cdc8448882b75811e0c784f5f20aef2 - path: output/chromap/versions.yml - md5sum: 51cff66779161d8a602cce5989017395 + md5sum: 68ffe268a9d460956de4aad2a55ffd68 - name: chromap chromap test_chromap_chromap_paired_bam command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -30,6 +30,6 @@ files: - path: output/chromap/genome.index - path: output/chromap/test.bam - md5sum: f255c7441d5a1f307fc642d2aa19647e + md5sum: df467417407408e42992dc3dd15b22f5 - path: output/chromap/versions.yml - md5sum: f91910c44169549c3923931de5c3afcb + md5sum: ea732b4c6f1312d09745b66c3963dd3f diff --git a/tests/modules/chromap/index/test.yml b/tests/modules/chromap/index/test.yml index b2aa37d8..3e1d6fa6 100644 --- a/tests/modules/chromap/index/test.yml +++ b/tests/modules/chromap/index/test.yml @@ -6,4 +6,4 @@ files: - path: output/chromap/genome.index - path: output/chromap/versions.yml - md5sum: b75dec647f9dc5f4887f36d1db7a9ccd + md5sum: fc5c80190d0622ea3e979e6862f8e32b From 1ec1868264a8429e02ee3dca881b69c80239c9dc Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 29 Mar 2022 19:13:41 +0200 Subject: [PATCH 080/283] Fix typo (#1464) --- modules/controlfreec/freec/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/controlfreec/freec/main.nf b/modules/controlfreec/freec/main.nf index ba48ea1e..eb66eeaa 100644 --- a/modules/controlfreec/freec/main.nf +++ b/modules/controlfreec/freec/main.nf @@ -41,7 +41,7 @@ process CONTROLFREEC_FREEC { def chr_length = fai ? "chrLenFile = \${PWD}/${fai}" : "" def breakpointthreshold = task.ext.args?["general"]?["breakpointthreshold"] ? "breakPointThreshold = ${task.ext.args["general"]["breakpointthreshold"]}" : "" def breakpointtype = task.ext.args?["general"]?["breakpointtype"] ? "breakPointType = ${task.ext.args["general"]["breakpointtype"]}" : "" - def coefficientofvariation = task.ext.args?["general"]?["coefficient"] ? "coefficientOfVariation = ${task.ext.args["general"]["coefficientofvariation"]}" : "" + def coefficientofvariation = task.ext.args?["general"]?["coefficientofvariation"] ? "coefficientOfVariation = ${task.ext.args["general"]["coefficientofvariation"]}" : "" def contamination = task.ext.args?["general"]?["contamination"] ? "contamination = ${task.ext.args["general"]["contamination"]}" : "" def contaminationadjustment = task.ext.args?["general"]?["contaminationadjustment"] ? 
"contaminationAdjustment = ${task.ext.args["general"]["contaminationadjustment"]}" : "" def degree = task.ext.args?["general"]?["degree"] ? "degree = ${task.ext.args["general"]["degree"]}" : "" From a973b68200da801be1c2e27b8fd72d7256f5db6b Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Wed, 30 Mar 2022 14:50:27 +0200 Subject: [PATCH 081/283] svdb update to 2.6.0 (#1465) --- modules/svdb/merge/main.nf | 6 +++--- modules/svdb/query/main.nf | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/svdb/merge/main.nf b/modules/svdb/merge/main.nf index 9e729bf3..4a39940c 100644 --- a/modules/svdb/merge/main.nf +++ b/modules/svdb/merge/main.nf @@ -2,10 +2,10 @@ process SVDB_MERGE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::svdb=2.5.2" : null) + conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0': - 'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }" + 'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0': + 'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }" input: tuple val(meta), path(vcfs) diff --git a/modules/svdb/query/main.nf b/modules/svdb/query/main.nf index 15609bad..c669b5a5 100644 --- a/modules/svdb/query/main.nf +++ b/modules/svdb/query/main.nf @@ -2,10 +2,10 @@ process SVDB_QUERY { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::svdb=2.5.2" : null) + conda (params.enable_conda ? "bioconda::svdb=2.6.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/svdb:2.5.2--py39h5371cbf_0': - 'quay.io/biocontainers/svdb:2.5.2--py39h5371cbf_0' }" + 'https://depot.galaxyproject.org/singularity/svdb:2.6.0--py39h5371cbf_0': + 'quay.io/biocontainers/svdb:2.6.0--py39h5371cbf_0' }" input: tuple val(meta), path(vcf) From fd5f6f5f4ffef4ab5a4e809bd3211bbc71c38d30 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Wed, 30 Mar 2022 20:21:06 +0200 Subject: [PATCH 082/283] Revert recent changes made to some of the modules (#1463) * revert changes to modules * fix tests * fix kraken2 * fix untar * fix cat * add blank lines * fix typo --- modules/cat/cat/main.nf | 2 +- modules/deepvariant/main.nf | 2 +- .../gatk4/createsequencedictionary/main.nf | 3 +- modules/manta/germline/main.nf | 3 +- modules/tiddit/cov/main.nf | 4 +-- modules/tiddit/sv/main.nf | 6 ++-- tests/modules/manta/germline/main.nf | 30 +++++++++++-------- 7 files changed, 27 insertions(+), 23 deletions(-) diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index b02ca7ee..09a41561 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -30,7 +30,7 @@ process CAT_CAT { // | ungzipped | gzipped | cat | pigz | // Use input file ending as default - prefix = task.ext.prefix ?: "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}" + prefix = task.ext.prefix ?: "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}" out_zip = prefix.endsWith('.gz') in_zip = file_list[0].endsWith('.gz') command1 = (in_zip && !out_zip) ? 
'zcat' : 'cat' diff --git a/modules/deepvariant/main.nf b/modules/deepvariant/main.nf index f0c6b47b..e2a0bee7 100644 --- a/modules/deepvariant/main.nf +++ b/modules/deepvariant/main.nf @@ -46,7 +46,7 @@ process DEEPVARIANT { """ stub: - def prefix = task.ext.prefix ?: "${meta.id}" + prefix = task.ext.prefix ?: "${meta.id}" """ touch ${prefix}.vcf.gz touch ${prefix}.g.vcf.gz diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index fe1a6c65..dea77a1d 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -39,9 +39,8 @@ process GATK4_CREATESEQUENCEDICTIONARY { """ stub: - def prefix = task.ext.prefix ?: "${meta.id}" """ - touch ${prefix}.dict + touch test.dict cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index ef6bd4a3..5ddba51b 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -8,9 +8,10 @@ process MANTA_GERMLINE { 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: - tuple val(meta), path(input), path(index), path(target_bed), path(target_bed_tbi) + tuple val(meta), path(input), path(index) path fasta path fasta_fai + tuple path(target_bed), path(target_bed_tbi) output: diff --git a/modules/tiddit/cov/main.nf b/modules/tiddit/cov/main.nf index f805cfa7..578c4043 100644 --- a/modules/tiddit/cov/main.nf +++ b/modules/tiddit/cov/main.nf @@ -40,8 +40,8 @@ process TIDDIT_COV { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch $prefix.wig - touch $prefix.tab + touch ${prefix}.wig + touch ${prefix}.tab cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index 872eeed1..1bf7146a 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -42,9 +42,9 @@ process TIDDIT_SV { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch $prefix.vcf - touch $prefix.ploidy.tab - touch $prefix.signals.tab + touch ${prefix}.vcf + touch ${prefix}.ploidy.tab + touch ${prefix}.signals.tab cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index 0081c29f..bad62629 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -7,28 +7,30 @@ include { MANTA_GERMLINE } from '../../../../modules/manta/germline/main.nf' workflow test_manta_germline { input = [ [ id:'test'], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - [],[] + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = [[],[]] - MANTA_GERMLINE ( input, fasta, fai ) + MANTA_GERMLINE ( input, fasta, fai, bed ) } workflow test_manta_germline_target_bed { input = [ [ id:'test'], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), - 
file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = [ + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), + ] - MANTA_GERMLINE ( input, fasta, fai ) + MANTA_GERMLINE ( input, fasta, fai, bed ) } workflow test_manta_germline_target_bed_jointcalling { @@ -37,12 +39,14 @@ workflow test_manta_germline_target_bed_jointcalling { [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true)], [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),], - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + bed = [ + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), + ] - MANTA_GERMLINE ( input, fasta, fai ) + MANTA_GERMLINE ( input, fasta, fai, bed ) } From 0de6406217802e0bcbef89f7568b346904f5236c Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Wed, 30 Mar 2022 23:01:17 +0200 Subject: [PATCH 083/283] feat: add module for seqkit stats (#1466) --- modules/seqkit/stats/main.nf | 34 +++++++++++++ modules/seqkit/stats/meta.yml | 44 ++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/seqkit/stats/main.nf | 58 ++++++++++++++++++++++ tests/modules/seqkit/stats/nextflow.config | 5 ++ tests/modules/seqkit/stats/test.yml | 54 ++++++++++++++++++++ 6 files changed, 199 insertions(+) create mode 100644 modules/seqkit/stats/main.nf create mode 100644 modules/seqkit/stats/meta.yml create mode 100644 tests/modules/seqkit/stats/main.nf create mode 100644 tests/modules/seqkit/stats/nextflow.config create mode 100644 tests/modules/seqkit/stats/test.yml diff --git a/modules/seqkit/stats/main.nf b/modules/seqkit/stats/main.nf new file mode 100644 index 00000000..28ac77b7 --- /dev/null +++ b/modules/seqkit/stats/main.nf @@ -0,0 +1,34 @@ +process SEQKIT_STATS { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::seqkit=2.2.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/seqkit:2.2.0--h9ee0642_0': + 'quay.io/biocontainers/seqkit:2.2.0--h9ee0642_0' }" + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*.tsv"), emit: stats + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '--all' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + seqkit stats \\ + --tabular \\ + $args \\ + $reads > '${prefix}.tsv' + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + seqkit: \$( seqkit version | sed 's/seqkit v//' ) + END_VERSIONS + """ +} diff --git a/modules/seqkit/stats/meta.yml b/modules/seqkit/stats/meta.yml new file mode 100644 index 00000000..9d6e5b7e --- /dev/null +++ b/modules/seqkit/stats/meta.yml @@ -0,0 +1,44 @@ +name: "seqkit_stats" +description: simple statistics of FASTA/Q files +keywords: + - seqkit + - stats +tools: + - "seqkit": + description: Cross-platform and ultrafast toolkit for FASTA/Q file manipulation, written by Wei Shen. + homepage: https://bioinf.shenwei.me/seqkit/usage/ + documentation: https://bioinf.shenwei.me/seqkit/usage/ + tool_dev_url: https://github.com/shenwei356/seqkit/ + doi: "10.1371/journal.pone.0163962" + licence: ["MIT"] + +input: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: > + Either FASTA or FASTQ files. + pattern: "*.{fa,fna,faa,fasta,fq,fastq}[.gz]" + +output: + - meta: + type: map + description: > + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - stats: + type: file + description: > + Tab-separated output file with basic sequence statistics. 
+ pattern: "*.tsv" + +authors: + - "@Midnighter" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2a7b2f70..72ea6cf4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1591,6 +1591,10 @@ seqkit/split2: - modules/seqkit/split2/** - tests/modules/seqkit/split2/** +seqkit/stats: + - modules/seqkit/stats/** + - tests/modules/seqkit/stats/** + seqsero2: - modules/seqsero2/** - tests/modules/seqsero2/** diff --git a/tests/modules/seqkit/stats/main.nf b/tests/modules/seqkit/stats/main.nf new file mode 100644 index 00000000..77442115 --- /dev/null +++ b/tests/modules/seqkit/stats/main.nf @@ -0,0 +1,58 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SEQKIT_STATS } from '../../../../modules/seqkit/stats/main.nf' + +workflow test_seqkit_stats_single_end { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + + SEQKIT_STATS ( input ) +} + +workflow test_seqkit_stats_paired_end { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) + ] + ] + + SEQKIT_STATS ( input ) +} + +workflow test_seqkit_stats_nanopore { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['nanopore']['test_fastq_gz'], checkIfExists: true), + ] + + SEQKIT_STATS ( input ) +} + +workflow test_seqkit_stats_genome_fasta { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), + ] + + SEQKIT_STATS ( input ) +} + +workflow test_seqkit_stats_transcriptome_fasta { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true), + ] + + SEQKIT_STATS ( input ) +} diff --git a/tests/modules/seqkit/stats/nextflow.config b/tests/modules/seqkit/stats/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/seqkit/stats/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/seqkit/stats/test.yml b/tests/modules/seqkit/stats/test.yml new file mode 100644 index 00000000..fdf4533f --- /dev/null +++ b/tests/modules/seqkit/stats/test.yml @@ -0,0 +1,54 @@ +- name: seqkit stats test_seqkit_stats_single_end + command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_single_end -c tests/config/nextflow.config + tags: + - seqkit/stats + - seqkit + files: + - path: output/seqkit/test.tsv + md5sum: e23227d089a7e04b0ec0cb547c4aadff + - path: output/seqkit/versions.yml + md5sum: d67f0c16feb9df77b11f6c91bbdf9926 + +- name: seqkit stats test_seqkit_stats_paired_end + command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_paired_end -c tests/config/nextflow.config + tags: + - seqkit/stats + - seqkit + files: + - path: output/seqkit/test.tsv + md5sum: 9de20dc39fb01285e3f0c382fda9db52 + - path: output/seqkit/versions.yml + md5sum: bd8881933b953d07f2600e2e6a88ebf3 + +- name: seqkit stats test_seqkit_stats_nanopore + command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_nanopore -c tests/config/nextflow.config + tags: + - 
seqkit/stats + - seqkit + files: + - path: output/seqkit/test.tsv + md5sum: 5da1709eb5ae64fa3b2d624bffe2e7aa + - path: output/seqkit/versions.yml + md5sum: 565632701fbe048f7ba99f1865bd48ca + +- name: seqkit stats test_seqkit_stats_genome_fasta + command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_genome_fasta -c tests/config/nextflow.config + tags: + - seqkit/stats + - seqkit + files: + - path: output/seqkit/test.tsv + md5sum: f64489767a4e769539ef3faf83260184 + - path: output/seqkit/versions.yml + md5sum: 782fcdeaa922c8bb532ffa5808849d87 + +- name: seqkit stats test_seqkit_stats_transcriptome_fasta + command: nextflow run tests/modules/seqkit/stats -entry test_seqkit_stats_transcriptome_fasta -c tests/config/nextflow.config + tags: + - seqkit/stats + - seqkit + files: + - path: output/seqkit/test.tsv + md5sum: fbb975b665a08c8862fcd1268613a945 + - path: output/seqkit/versions.yml + md5sum: db99b016d986d26102ec398264a58410 From 233f2e728b175d8d3c398ce6dd052c6a14a3aeb7 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Thu, 31 Mar 2022 10:53:26 +0200 Subject: [PATCH 084/283] fix cnvpytor links (#1470) --- modules/cnvpytor/callcnvs/main.nf | 2 +- modules/cnvpytor/histogram/main.nf | 2 +- modules/cnvpytor/importreaddepth/main.nf | 2 +- modules/cnvpytor/partition/main.nf | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/cnvpytor/callcnvs/main.nf b/modules/cnvpytor/callcnvs/main.nf index 1d47ce16..e296656b 100644 --- a/modules/cnvpytor/callcnvs/main.nf +++ b/modules/cnvpytor/callcnvs/main.nf @@ -4,7 +4,7 @@ process CNVPYTOR_CALLCNVS { conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2': 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" input: diff --git a/modules/cnvpytor/histogram/main.nf b/modules/cnvpytor/histogram/main.nf index 29dc1bff..e421f1b2 100644 --- a/modules/cnvpytor/histogram/main.nf +++ b/modules/cnvpytor/histogram/main.nf @@ -4,7 +4,7 @@ process CNVPYTOR_HISTOGRAM { conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2': 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" input: diff --git a/modules/cnvpytor/importreaddepth/main.nf b/modules/cnvpytor/importreaddepth/main.nf index 9fc7db08..1b037629 100644 --- a/modules/cnvpytor/importreaddepth/main.nf +++ b/modules/cnvpytor/importreaddepth/main.nf @@ -4,7 +4,7 @@ process CNVPYTOR_IMPORTREADDEPTH { conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2': 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" input: diff --git a/modules/cnvpytor/partition/main.nf b/modules/cnvpytor/partition/main.nf index e3f73955..74ab4026 100644 --- a/modules/cnvpytor/partition/main.nf +++ b/modules/cnvpytor/partition/main.nf @@ -4,7 +4,7 @@ process CNVPYTOR_PARTITION { conda (params.enable_conda ? "bioconda::cnvpytor=1.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/cnvpytor:A1.0--py39h6a678da_2': + 'https://depot.galaxyproject.org/singularity/cnvpytor:1.0--py39h6a678da_2': 'quay.io/biocontainers/cnvpytor:1.0--py39h6a678da_2' }" input: From eeda4136c096688d04cc40bb3c70d948213ed641 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Thu, 31 Mar 2022 13:42:44 +0200 Subject: [PATCH 085/283] Update cat module (#1469) * fix cat stub * add test * commit suggestions Co-authored-by: Jose Espinosa-Carrasco --- modules/cat/cat/main.nf | 2 ++ tests/modules/cat/cat/test.yml | 40 ++++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/modules/cat/cat/main.nf b/modules/cat/cat/main.nf index 09a41561..40e53f3e 100644 --- a/modules/cat/cat/main.nf +++ b/modules/cat/cat/main.nf @@ -49,6 +49,8 @@ process CAT_CAT { """ stub: + def file_list = files_in.collect { it.toString() } + prefix = task.ext.prefix ?: "${meta.id}${file_list[0].substring(file_list[0].lastIndexOf('.'))}" """ touch $prefix diff --git a/tests/modules/cat/cat/test.yml b/tests/modules/cat/cat/test.yml index d6e6595e..9710b665 100644 --- a/tests/modules/cat/cat/test.yml +++ b/tests/modules/cat/cat/test.yml @@ -7,6 +7,14 @@ - path: output/cat/test.fasta md5sum: f44b33a0e441ad58b2d3700270e2dbe2 +- name: cat unzipped unzipped stub + command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run + tags: + - cat + - cat/cat + files: + - path: output/cat/test.fasta + - name: cat zipped zipped command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: @@ -15,6 +23,14 @@ files: - path: output/cat/test.gz +- name: cat zipped zipped stub + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run + tags: + - cat + - cat/cat + files: + - path: output/cat/test.gz + - name: cat zipped unzipped command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: @@ -24,6 +40,14 @@ - path: output/cat/cat.txt md5sum: c439d3b60e7bc03e8802a451a0d9a5d9 +- name: cat zipped unzipped stub + command: nextflow run ./tests/modules/cat/cat -entry test_cat_zipped_unzipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run + tags: + - cat + - cat/cat + files: + - path: output/cat/cat.txt + - name: cat unzipped zipped command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: @@ -32,6 +56,14 @@ files: - path: output/cat/cat.txt.gz +- name: cat unzipped zipped stub 
+ command: nextflow run ./tests/modules/cat/cat -entry test_cat_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run + tags: + - cat + - cat/cat + files: + - path: output/cat/cat.txt.gz + - name: cat one file unzipped zipped command: nextflow run ./tests/modules/cat/cat -entry test_cat_one_file_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config tags: @@ -39,3 +71,11 @@ - cat/cat files: - path: output/cat/cat.txt.gz + +- name: cat one file unzipped zipped stub + command: nextflow run ./tests/modules/cat/cat -entry test_cat_one_file_unzipped_zipped -c ./tests/config/nextflow.config -c ./tests/modules/cat/cat/nextflow.config -stub-run + tags: + - cat + - cat/cat + files: + - path: output/cat/cat.txt.gz From 794f84534b0b259662199b7f3a67baa3c3ef9b62 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Guizard?= Date: Thu, 31 Mar 2022 14:11:51 +0100 Subject: [PATCH 086/283] New Module: `gstama/polyacleanup` (#1468) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 📦 NEW: Add gstama/polyacleanup polyacleanup script remove remaining polyA tails from FLNC reads (Pacbio isoseq3) * 🐛 FIX: Prettier: replace simple quote by double quote * 🐛 FIX: Update TEMPLATE to nf-core 2.4 * 👌 IMPROVE: Compress outputs Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/gstama/polyacleanup/main.nf | 40 ++++++++++++++ modules/gstama/polyacleanup/meta.yml | 55 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/gstama/polyacleanup/main.nf | 15 +++++ .../gstama/polyacleanup/nextflow.config | 6 ++ tests/modules/gstama/polyacleanup/test.yml | 14 +++++ 6 files changed, 134 insertions(+) create mode 100644 modules/gstama/polyacleanup/main.nf create mode 100644 modules/gstama/polyacleanup/meta.yml create mode 100644 tests/modules/gstama/polyacleanup/main.nf create mode 100644 tests/modules/gstama/polyacleanup/nextflow.config create mode 100644 tests/modules/gstama/polyacleanup/test.yml diff --git a/modules/gstama/polyacleanup/main.nf b/modules/gstama/polyacleanup/main.nf new file mode 100644 index 00000000..214e5f93 --- /dev/null +++ b/modules/gstama/polyacleanup/main.nf @@ -0,0 +1,40 @@ +process GSTAMA_POLYACLEANUP { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gs-tama=1.0.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/gs-tama:1.0.3--hdfd78af_0': + 'quay.io/biocontainers/gs-tama:1.0.3--hdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + + output: + tuple val(meta), path("*_tama.fa.gz") , emit: fasta + tuple val(meta), path("*_tama_polya_flnc_report.txt.gz"), emit: report + tuple val(meta), path("*_tama_tails.fa.gz") , emit: tails + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if( "$fasta" == "${prefix}.fasta" | "$fasta" == "${prefix}.fa" ) error "Input and output names are the same, set prefix in module configuration" + """ + tama_flnc_polya_cleanup.py \\ + -f $fasta \\ + -p ${prefix} \\ + $args + gzip ${prefix}.fa + gzip ${prefix}_polya_flnc_report.txt + gzip ${prefix}_tails.fa + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gstama: \$( tama_collapse.py -version | grep 'tc_version_date_'|sed 's/tc_version_date_//g' ) + END_VERSIONS + """ +} diff --git a/modules/gstama/polyacleanup/meta.yml b/modules/gstama/polyacleanup/meta.yml new file mode 100644 index 00000000..c7047d15 --- /dev/null +++ b/modules/gstama/polyacleanup/meta.yml @@ -0,0 +1,55 @@ +name: gstama_polyacleanup +description: Helper script, remove remaining polyA sequences from Full Length Non Chimeric reads (Pacbio isoseq3) +keywords: + - gstama + - gstama/polyacleanup + - long-read + - isoseq + - tama + - trancriptome + - annotation +tools: + - gstama: + description: Gene-Switch Transcriptome Annotation by Modular Algorithms + homepage: https://github.com/sguizard/gs-tama + documentation: https://github.com/GenomeRIK/tama/wiki + tool_dev_url: https://github.com/sguizard/gs-tama + doi: "https://doi.org/10.1186/s12864-020-07123-7" + licence: ["GPL v3 License"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Full Length Non Chimeric reads in fasta format + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fasta: + type: file + description: The Full Length Non Chimeric reads clened from remaining polyA tails. The sequences are in FASTA format compressed with gzip. + pattern: "*_tama.fa.gz" + - report: + type: file + description: A text file describing the number of polyA tails removed and their length. Compressed with gzip. + pattern: "*_tama_polya_flnc_report.txt.gz" + - tails: + type: file + description: A gzip compressed FASTA file of trimmed polyA tails. 
+ pattern: "*_tama_tails.fa.gz" + +authors: + - "@sguizard" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 72ea6cf4..20933e2d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -811,6 +811,10 @@ gstama/merge: - modules/gstama/merge/** - tests/modules/gstama/merge/** +gstama/polyacleanup: + - modules/gstama/polyacleanup/** + - tests/modules/gstama/polyacleanup/** + gtdbtk/classifywf: - modules/gtdbtk/classifywf/** - tests/modules/gtdbtk/classifywf/** diff --git a/tests/modules/gstama/polyacleanup/main.nf b/tests/modules/gstama/polyacleanup/main.nf new file mode 100644 index 00000000..67b3fa87 --- /dev/null +++ b/tests/modules/gstama/polyacleanup/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GSTAMA_POLYACLEANUP } from '../../../../modules/gstama/polyacleanup/main.nf' + +workflow test_gstama_polyacleanup { + + input = [ + [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['genome']['transcriptome_fasta'], checkIfExists: true) + ] + + GSTAMA_POLYACLEANUP ( input ) +} diff --git a/tests/modules/gstama/polyacleanup/nextflow.config b/tests/modules/gstama/polyacleanup/nextflow.config new file mode 100644 index 00000000..ff407702 --- /dev/null +++ b/tests/modules/gstama/polyacleanup/nextflow.config @@ -0,0 +1,6 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + ext.prefix = { "${meta.id}_tama" } + +} diff --git a/tests/modules/gstama/polyacleanup/test.yml b/tests/modules/gstama/polyacleanup/test.yml new file mode 100644 index 00000000..7f9f4260 --- /dev/null +++ b/tests/modules/gstama/polyacleanup/test.yml @@ -0,0 +1,14 @@ +- name: gstama polyacleanup test_gstama_polyacleanup + command: nextflow run tests/modules/gstama/polyacleanup -entry test_gstama_polyacleanup -c tests/config/nextflow.config + tags: + - gstama + - gstama/polyacleanup + files: + - path: output/gstama/test_tama.fa.gz + md5sum: 9c768387478e5f966a42c369c0270b09 + - path: output/gstama/test_tama_polya_flnc_report.txt.gz + md5sum: fe3606979ed11538aacd83159f4cff03 + - path: output/gstama/test_tama_tails.fa.gz + md5sum: ba21256c0afe0bda71b3ee66b4c761bf + - path: output/gstama/versions.yml + md5sum: 07ebb812ae13a350d955fab7600b2542 From 64b06baa06bc41269282bc7d286af37e859ad244 Mon Sep 17 00:00:00 2001 From: Maxime Borry Date: Thu, 31 Mar 2022 15:18:18 +0200 Subject: [PATCH 087/283] Update PyDamage module (#1467) * add pydamage module * remove TODOs * split module by subcommands * update version parsing * remove forgotten TODOs * update module names * remove old holistic module * Update modules/pydamage/analyze/main.nf Co-authored-by: James A. Fellows Yates * add keywords * update resource requirement * Update modules/pydamage/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/pydamage/filter/meta.yml Co-authored-by: James A. Fellows Yates * merge from upstream * update pydamage from upstream * add freebayes * update pydamage test from upstream * fix meta.yml * update functions.nf * update test.yml * update version parsing * update version parsing * fix indentation * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * Update modules/freebayes/main.nf Co-authored-by: James A. Fellows Yates * add optional inputs * Update modules/freebayes/main.nf Co-authored-by: James A. 
Fellows Yates * add bed test * add metabat2 module * only freebayes * remove metabat2 * update md5sum because of vcf including date of the day * add keyword * rescue conflicted files * attempt to fix ECLint * add pytest workflow for metabat * remove - * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * add optional inputs/outpus * remove trailing whitespace * compressing and removing not reproducible md5sums * follow symlinks while decompressing * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update tests/modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * split tests * export env variable * Update modules/metabat2/jgisummarizebamcontigdepths/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/jgisummarizebamcontigdepths/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/main.nf Co-authored-by: James A. Fellows Yates * Update modules/metabat2/metabat2/meta.yml Co-authored-by: James A. Fellows Yates * answer PR comments and switch to bgzip * add bacillus fragilis alignments * add meta to samtools/faidx * move to bgzip * update freebayes test results * bump pydamage version to 0.70 Co-authored-by: James A. Fellows Yates Co-authored-by: Harshil Patel Co-authored-by: Gregor Sturm Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/pydamage/analyze/main.nf | 6 +++--- modules/pydamage/filter/main.nf | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/pydamage/analyze/main.nf b/modules/pydamage/analyze/main.nf index 39e01fa1..3463b0e5 100644 --- a/modules/pydamage/analyze/main.nf +++ b/modules/pydamage/analyze/main.nf @@ -2,10 +2,10 @@ process PYDAMAGE_ANALYZE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) + conda (params.enable_conda ? "bioconda::pydamage=0.70" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : - 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/pydamage:0.70--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.70--pyhdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/pydamage/filter/main.nf b/modules/pydamage/filter/main.nf index a748875a..14fbf1c5 100644 --- a/modules/pydamage/filter/main.nf +++ b/modules/pydamage/filter/main.nf @@ -2,10 +2,10 @@ process PYDAMAGE_FILTER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::pydamage=0.62" : null) + conda (params.enable_conda ? "bioconda::pydamage=0.70" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/pydamage:0.62--pyhdfd78af_0' : - 'quay.io/biocontainers/pydamage:0.62--pyhdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/pydamage:0.70--pyhdfd78af_0' : + 'quay.io/biocontainers/pydamage:0.70--pyhdfd78af_0' }" input: tuple val(meta), path(csv) From 5e7daa7b96fa9699a52ac8fa3dcbf4291cc6d050 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Thu, 31 Mar 2022 23:59:21 +0200 Subject: [PATCH 088/283] Add task.ext.args to phantompeakqualtools and finish the module (#1474) * Add --max--ppsize option * Add args to phantompeakqualtools and all the missing files (test, yml, ...) * Fix tests * Add the nextflow.config * Fix tests --- modules/phantompeakqualtools/main.nf | 2 +- modules/phantompeakqualtools/meta.yml | 60 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/phantompeakqualtools/main.nf | 25 ++++++++ .../phantompeakqualtools/nextflow.config | 5 ++ tests/modules/phantompeakqualtools/test.yml | 23 +++++++ 6 files changed, 118 insertions(+), 1 deletion(-) create mode 100644 modules/phantompeakqualtools/meta.yml create mode 100644 tests/modules/phantompeakqualtools/main.nf create mode 100644 tests/modules/phantompeakqualtools/nextflow.config create mode 100644 tests/modules/phantompeakqualtools/test.yml diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index c8325b05..f584cb65 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -26,7 +26,7 @@ process PHANTOMPEAKQUALTOOLS { def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` - Rscript -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus + Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/phantompeakqualtools/meta.yml b/modules/phantompeakqualtools/meta.yml new file mode 100644 index 00000000..6488500d --- /dev/null +++ b/modules/phantompeakqualtools/meta.yml @@ -0,0 +1,60 @@ +name: "phantompeakqualtools" + +description: +keywords: + - "ChIP-Seq" + - "QC" + - "phantom peaks" +tools: + - "phantompeakqualtools": + description: | + "This package computes informative enrichment and quality measures + for ChIP-seq/DNase-seq/FAIRE-seq/MNase-seq data. It can also be used + to obtain robust estimates of the predominant fragment length or + characteristic tag shift values in these assays." + homepage: "None" + documentation: "https://github.com/kundajelab/phantompeakqualtools" + tool_dev_url: "https://github.com/kundajelab/phantompeakqualtools" + doi: "https://doi.org/10.1101/gr.136184.111" + licence: "['BSD-3-clause']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - spp: + type: file + description: | + A ChIP-Seq Processing Pipeline file containing + peakshift/phantomPeak results + pattern: "*.{out}" + - pdf: + type: file + description: A pdf containing save cross-correlation plots + pattern: "*.{pdf}" + - rdata: + type: file + description: Rdata file containing the R session + pattern: "*.{Rdata}" + +authors: + - "@drpatelh" + - "@Emiller88" + - "@JoseEspinosa" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 20933e2d..8425b16c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1303,6 +1303,10 @@ peddy: - modules/peddy/** - tests/modules/peddy/** +phantompeakqualtools: + - modules/phantompeakqualtools/** + - tests/modules/phantompeakqualtools/** + phyloflash: - modules/phyloflash/** - tests/modules/phyloflash/** diff --git a/tests/modules/phantompeakqualtools/main.nf b/tests/modules/phantompeakqualtools/main.nf new file mode 100644 index 00000000..3b995d68 --- /dev/null +++ b/tests/modules/phantompeakqualtools/main.nf @@ -0,0 +1,25 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PHANTOMPEAKQUALTOOLS } from '../../../modules/phantompeakqualtools/main.nf' + +workflow test_phantompeakqualtools_single_end { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + ] + + PHANTOMPEAKQUALTOOLS ( input ) +} + +workflow test_phantompeakqualtools_paired_end { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + PHANTOMPEAKQUALTOOLS ( input ) +} diff --git a/tests/modules/phantompeakqualtools/nextflow.config b/tests/modules/phantompeakqualtools/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/phantompeakqualtools/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/phantompeakqualtools/test.yml b/tests/modules/phantompeakqualtools/test.yml new file mode 100644 index 00000000..3ab612bd --- /dev/null +++ b/tests/modules/phantompeakqualtools/test.yml @@ -0,0 +1,23 @@ +- name: phantompeakqualtools test_phantompeakqualtools_single_end + command: nextflow run tests/modules/phantompeakqualtools -entry test_phantompeakqualtools_single_end -c tests/config/nextflow.config + tags: + - phantompeakqualtools + files: + - path: output/phantompeakqualtools/test.spp.Rdata + - path: output/phantompeakqualtools/test.spp.out + md5sum: b01d976506b6fe45b66c821b1e8a1d15 + - path: output/phantompeakqualtools/test.spp.pdf + - path: output/phantompeakqualtools/versions.yml + md5sum: 6c2ede1aac4c574e3c72fbe09f15c03f + +- name: phantompeakqualtools test_phantompeakqualtools_paired_end + command: nextflow run tests/modules/phantompeakqualtools -entry test_phantompeakqualtools_paired_end -c tests/config/nextflow.config + tags: + - phantompeakqualtools + files: + - path: output/phantompeakqualtools/test.spp.Rdata + - path: output/phantompeakqualtools/test.spp.out + md5sum: eed46e75eab119224f397a7a8b5924e6 + - path: output/phantompeakqualtools/test.spp.pdf + - path: output/phantompeakqualtools/versions.yml + md5sum: 383d2dd583fcb40451bde0d3840bdb72 From 8ce68107871c96519b3eb0095d97896e34ef4489 
Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Fri, 1 Apr 2022 11:33:07 +0200 Subject: [PATCH 089/283] Update DASTool to 1.1.4 (#1471) * fix: remove left-over unnecessary code * Update DASTool * Fix tests * Fix test.ymls * Fix container build version * Make tests less strict to account for variability * Apply suggestions from code review Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> * Add missing description Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Daniel Straub <42973691+d4straub@users.noreply.github.com> --- modules/dastool/dastool/main.nf | 29 ++++------ modules/dastool/dastool/meta.yml | 18 +++--- modules/dastool/fastatocontig2bin/main.nf | 41 ++++++++++++++ modules/dastool/fastatocontig2bin/meta.yml | 56 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/dastool/dastool/main.nf | 10 ++-- tests/modules/dastool/dastool/test.yml | 29 +++++----- .../modules/dastool/fastatocontig2bin/main.nf | 48 ++++++++++++++++ .../dastool/fastatocontig2bin/nextflow.config | 5 ++ .../dastool/fastatocontig2bin/test.yml | 20 +++++++ 10 files changed, 216 insertions(+), 44 deletions(-) create mode 100644 modules/dastool/fastatocontig2bin/main.nf create mode 100644 modules/dastool/fastatocontig2bin/meta.yml create mode 100644 tests/modules/dastool/fastatocontig2bin/main.nf create mode 100644 tests/modules/dastool/fastatocontig2bin/nextflow.config create mode 100644 tests/modules/dastool/fastatocontig2bin/test.yml diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index 53dfea19..968f85de 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -2,27 +2,28 @@ process DASTOOL_DASTOOL { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::das_tool=1.1.3" : null) + conda (params.enable_conda ? "bioconda::das_tool=1.1.4" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/das_tool:1.1.3--r41hdfd78af_0' : - 'quay.io/biocontainers/das_tool:1.1.3--r41hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.4--r41hdfd78af_1' : + 'quay.io/biocontainers/das_tool:1.1.4--r41hdfd78af_1' }" input: tuple val(meta), path(contigs), path(bins) path(proteins) path(db_directory) - val(search_engine) output: tuple val(meta), path("*.log") , emit: log - tuple val(meta), path("*_summary.txt") , emit: summary - tuple val(meta), path("*_DASTool_scaffolds2bin.txt") , emit: scaffolds2bin + tuple val(meta), path("*_summary.tsv") , emit: summary + tuple val(meta), path("*_DASTool_contig2bin.tsv") , emit: contig2bin tuple val(meta), path("*.eval") , optional: true, emit: eval tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins tuple val(meta), path("*.pdf") , optional: true, emit: pdfs - tuple val(meta), path("*.proteins.faa") , optional: true, emit: fasta_proteins + tuple val(meta), path("*.candidates.faa") , optional: true, emit: fasta_proteins + tuple val(meta), path("*.faa") , optional: true, emit: candidates_faa tuple val(meta), path("*.archaea.scg") , optional: true, emit: fasta_archaea_scg tuple val(meta), path("*.bacteria.scg") , optional: true, emit: fasta_bacteria_scg + tuple val(meta), path("*.b6") , optional: true, emit: b6 tuple val(meta), path("*.seqlength") , optional: true, emit: seqlength path "versions.yml" , emit: versions @@ -33,17 +34,12 @@ process DASTOOL_DASTOOL { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def bin_list = bins instanceof List ? bins.join(",") : "$bins" - def engine = search_engine ? "--search_engine $search_engine" : "--search_engine diamond" def db_dir = db_directory ? "--db_directory $db_directory" : "" def clean_contigs = contigs.toString() - ".gz" def decompress_contigs = contigs.toString() == clean_contigs ? "" : "gunzip -q -f $contigs" - def decompress_proteins = proteins ? "gunzip -f $proteins" : "" def clean_proteins = proteins ? proteins.toString() - ".gz" : "" - def proteins_pred = proteins ? "--proteins $clean_proteins" : "" - - if (! search_engine) { - log.info('[DAS_Tool] Default search engine (USEARCH) is proprietary software and not available in bioconda. Using DIAMOND as alternative.') - } + def decompress_proteins = proteins ? "gunzip -f $proteins" : "" + def proteins_pred = proteins ? 
"-p $clean_proteins" : "" """ $decompress_proteins @@ -53,15 +49,14 @@ process DASTOOL_DASTOOL { $args \\ $proteins_pred \\ $db_dir \\ - $engine \\ -t $task.cpus \\ - --bins $bin_list \\ + -i $bin_list \\ -c $clean_contigs \\ -o $prefix cat <<-END_VERSIONS > versions.yml "${task.process}": - dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool version //' ) + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool //' ) END_VERSIONS """ } diff --git a/modules/dastool/dastool/meta.yml b/modules/dastool/dastool/meta.yml index a77df9bd..0889ca47 100644 --- a/modules/dastool/dastool/meta.yml +++ b/modules/dastool/dastool/meta.yml @@ -34,8 +34,8 @@ input: pattern: "*.{fa.gz,fas.gz,fasta.gz}" - bins: type: file - description: "Scaffolds2bin tabular file generated with dastool/scaffolds2bin" - pattern: "*.scaffolds2bin.tsv" + description: "FastaToContig2Bin tabular file generated with dastool/fastatocontig2bin" + pattern: "*.tsv" - proteins: type: file description: Predicted proteins in prodigal fasta format (>scaffoldID_geneNo) @@ -43,9 +43,6 @@ input: - db_directory: type: file description: (optional) Directory of single copy gene database. - - search_engine: - type: val - description: Engine used for single copy gene identification. USEARCH is not supported due to it being proprietary [blast/diamond] output: - meta: @@ -65,14 +62,17 @@ output: type: file description: Summary of output bins including quality and completeness estimates pattern: "*summary.txt" - - scaffolds2bin: + - contig2bin: type: file description: Scaffolds to bin file of output bins - pattern: "*.scaffolds2bin.txt" + pattern: "*.contig2bin.txt" - eval: type: file description: Quality and completeness estimates of input bin sets pattern: "*.eval" + - bins: + description: Final refined bins in fasta format + pattern: "*.fa" - pdfs: type: file description: Plots showing the amount of high quality bins and score distribution of bins per method @@ -89,6 +89,10 @@ output: type: file description: Results of bacterial single-copy-gene prediction pattern: "*.bacteria.scg" + - b6: + type: file + description: Results in b6 format + pattern: "*.b6" - seqlength: type: file description: Summary of contig lengths diff --git a/modules/dastool/fastatocontig2bin/main.nf b/modules/dastool/fastatocontig2bin/main.nf new file mode 100644 index 00000000..8bb13380 --- /dev/null +++ b/modules/dastool/fastatocontig2bin/main.nf @@ -0,0 +1,41 @@ +process DASTOOL_FASTATOCONTIG2BIN { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::das_tool=1.1.4" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/das_tool:1.1.4--r41hdfd78af_1' : + 'quay.io/biocontainers/das_tool:1.1.4--r41hdfd78af_1' }" + + input: + tuple val(meta), path(fasta) + val(extension) + + output: + tuple val(meta), path("*.tsv"), emit: fastatocontig2bin + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def file_extension = extension ? extension : "fasta" + def clean_fasta = fasta.toString() - ".gz" + def decompress_fasta = fasta.toString() == clean_fasta ? "" : "gunzip -q -f $fasta" + """ + $decompress_fasta + + Fasta_to_Contig2Bin.sh \\ + $args \\ + -i . 
\\ + -e $file_extension \\ + > ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dastool: \$( DAS_Tool --version 2>&1 | grep "DAS Tool" | sed 's/DAS Tool //' ) + END_VERSIONS + """ +} diff --git a/modules/dastool/fastatocontig2bin/meta.yml b/modules/dastool/fastatocontig2bin/meta.yml new file mode 100644 index 00000000..1176ae96 --- /dev/null +++ b/modules/dastool/fastatocontig2bin/meta.yml @@ -0,0 +1,56 @@ +name: dastool_fastatocontig2bin +description: Helper script to convert a set of bins in fasta format to tabular scaffolds2bin format +keywords: + - binning + - das tool + - table + - de novo + - bins + - contigs + - assembly + - das_tool +tools: + - dastool: + description: | + DAS Tool is an automated method that integrates the results + of a flexible number of binning algorithms to calculate an optimized, non-redundant + set of bins from a single assembly. + + homepage: https://github.com/cmks/DAS_Tool + documentation: https://github.com/cmks/DAS_Tool + tool_dev_url: https://github.com/cmks/DAS_Tool + doi: "10.1038/s41564-018-0171-1" + licence: ["BSD"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: Fasta of list of fasta files recommended to be gathered via with .collect() of bins + pattern: "*.{fa,fa.gz,fas,fas.gz,fna,fna.gz,fasta,fasta.gz}" + - extension: + type: val + description: Fasta file extension (fa | fas | fasta | ...), without .gz suffix, if gzipped input. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fastatocontig2bin: + type: file + description: tabular contig2bin file for DAS tool input + pattern: "*.tsv" + +authors: + - "@maxibor" + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 8425b16c..24bfe641 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -487,6 +487,10 @@ dastool/dastool: - modules/dastool/dastool/** - tests/modules/dastool/dastool/** +dastool/fastatocontig2bin: + - modules/dastool/fastatocontig2bin/** + - tests/modules/dastool/fastatocontig2bin/** + dastool/scaffolds2bin: - modules/dastool/scaffolds2bin/** - tests/modules/dastool/scaffolds2bin/** diff --git a/tests/modules/dastool/dastool/main.nf b/tests/modules/dastool/dastool/main.nf index f6f6becf..9853e724 100644 --- a/tests/modules/dastool/dastool/main.nf +++ b/tests/modules/dastool/dastool/main.nf @@ -3,7 +3,7 @@ nextflow.enable.dsl = 2 include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' -include { DASTOOL_SCAFFOLDS2BIN } from '../../../../modules/dastool/scaffolds2bin/main.nf' +include { DASTOOL_FASTATOCONTIG2BIN } from '../../../../modules/dastool/fastatocontig2bin/main.nf' include { DASTOOL_DASTOOL } from '../../../../modules/dastool/dastool/main.nf' workflow test_dastool_dastool { @@ -21,13 +21,13 @@ workflow test_dastool_dastool { METABAT2_METABAT2 ( input_metabat2 ) - DASTOOL_SCAFFOLDS2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") + DASTOOL_FASTATOCONTIG2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") Channel.of([ [ id:'test', single_end:false ], // meta map 
file(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true)]) - .join(DASTOOL_SCAFFOLDS2BIN.out.scaffolds2bin) + .join( DASTOOL_FASTATOCONTIG2BIN.out.fastatocontig2bin ) .set {input_dastool} - - DASTOOL_DASTOOL ( input_dastool, [], [], [] ) + + DASTOOL_DASTOOL ( input_dastool, [], [] ) } diff --git a/tests/modules/dastool/dastool/test.yml b/tests/modules/dastool/dastool/test.yml index 7f7eb19c..cda17bda 100644 --- a/tests/modules/dastool/dastool/test.yml +++ b/tests/modules/dastool/dastool/test.yml @@ -1,29 +1,28 @@ - name: dastool dastool test_dastool_dastool - command: nextflow run ./tests/modules/dastool/dastool -entry test_dastool_dastool -c ./tests/config/nextflow.config -c ./tests/modules/dastool/dastool/nextflow.config + command: nextflow run tests/modules/dastool/dastool -entry test_dastool_dastool -c tests/config/nextflow.config tags: - - dastool - dastool/dastool + - dastool files: - path: output/dastool/test.seqlength md5sum: b815a5811008c36808a59b1d0dcfab24 - path: output/dastool/test.tsv md5sum: 6e46c0be14dded7cb13af38f54feea47 - path: output/dastool/test_DASTool.log - contains: - - "DAS Tool run on" - - path: output/dastool/test_DASTool_scaffolds2bin.txt + - path: output/dastool/test_DASTool_contig2bin.tsv md5sum: 6e46c0be14dded7cb13af38f54feea47 - - path: output/dastool/test_DASTool_summary.txt - md5sum: a3efa8717b30dfada78dc5ae9a3dc396 + - path: output/dastool/test_DASTool_summary.tsv + md5sum: ab9dd3709a59a69bc66030b9e0ff3d5b + - path: output/dastool/test_proteins.faa + - path: output/dastool/test_proteins.faa.all.b6 + md5sum: 39c11237ef22ac73109aaac267e185d0 - path: output/dastool/test_proteins.faa.archaea.scg md5sum: e79d82eecee25821d1658ea4f082601d - path: output/dastool/test_proteins.faa.bacteria.scg md5sum: 8132cfb17cf398d41c036ead55c96ffe - - path: output/dastool/test_test.tsv.eval - md5sum: a3efa8717b30dfada78dc5ae9a3dc396 - - path: output/metabat2/bins/test.1.fa.gz - md5sum: 2b297bf557cc3831b800348859331268 - - path: output/metabat2/test.tsv.gz - md5sum: 619338fa5019e361d5545ce385a6961f - - path: output/metabat2/test.txt.gz - md5sum: 745a0446af6ef68b930975e9ce5a95d6 + - path: output/dastool/test_proteins.faa.findSCG.b6 + md5sum: 48e90e12cd6c88d00608777dbc48a82a + - path: output/dastool/test_proteins.faa.scg.candidates.faa + md5sum: d94b7bed0f8aa9cf2824d72c548c537c + - path: output/dastool/versions.yml + md5sum: 004e04c6a38652df2e0c59c44e29c9de diff --git a/tests/modules/dastool/fastatocontig2bin/main.nf b/tests/modules/dastool/fastatocontig2bin/main.nf new file mode 100644 index 00000000..0178dbf9 --- /dev/null +++ b/tests/modules/dastool/fastatocontig2bin/main.nf @@ -0,0 +1,48 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GUNZIP } from '../../../../modules/gunzip/main.nf' +include { METABAT2_METABAT2 } from '../../../../modules/metabat2/metabat2/main.nf' +include { METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS } from '../../../../modules/metabat2/jgisummarizebamcontigdepths/main.nf' +include { DASTOOL_FASTATOCONTIG2BIN } from '../../../../modules/dastool/fastatocontig2bin/main.nf' + +workflow test_dastool_fastatocontig2bin { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + 
Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + DASTOOL_FASTATOCONTIG2BIN ( METABAT2_METABAT2.out.fasta.collect(), "fa") +} + +workflow test_dastool_fastatocontig2bin_ungzipped { + + input_depth = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_paired_end_sorted_bam_bai'], checkIfExists: true) ] + + + METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS ( input_depth ) + + Channel.fromPath(params.test_data['bacteroides_fragilis']['genome']['genome_fna_gz'], checkIfExists: true) + .map { it -> [[ id:'test', single_end:false ], it] } + .join(METABAT2_JGISUMMARIZEBAMCONTIGDEPTHS.out.depth) + .set { input_metabat2 } + + METABAT2_METABAT2 ( input_metabat2 ) + + // TODO test unzipped input files + ch_input_2_fastatocontig2bin = GUNZIP( METABAT2_METABAT2.out.fasta ).gunzip + + DASTOOL_FASTATOCONTIG2BIN ( ch_input_2_fastatocontig2bin, "fa") +} diff --git a/tests/modules/dastool/fastatocontig2bin/nextflow.config b/tests/modules/dastool/fastatocontig2bin/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/dastool/fastatocontig2bin/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/dastool/fastatocontig2bin/test.yml b/tests/modules/dastool/fastatocontig2bin/test.yml new file mode 100644 index 00000000..94881438 --- /dev/null +++ b/tests/modules/dastool/fastatocontig2bin/test.yml @@ -0,0 +1,20 @@ +- name: dastool fastatocontig2bin test_dastool_fastatocontig2bin + command: nextflow run tests/modules/dastool/fastatocontig2bin -entry test_dastool_fastatocontig2bin -c tests/config/nextflow.config + tags: + - dastool + - dastool/fastatocontig2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/versions.yml + md5sum: ff4b6f14bee4548bf09b5e602c306595 + +- name: dastool fastatocontig2bin test_dastool_fastatocontig2bin_ungzipped + command: nextflow run tests/modules/dastool/fastatocontig2bin -entry test_dastool_fastatocontig2bin_ungzipped -c tests/config/nextflow.config + tags: + - dastool + - dastool/fastatocontig2bin + files: + - path: output/dastool/test.tsv + md5sum: 6e46c0be14dded7cb13af38f54feea47 + - path: output/dastool/versions.yml From 67c1bc9568cfc40cf7038c7be13b976fe76d76a1 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Fri, 1 Apr 2022 17:35:15 +0200 Subject: [PATCH 090/283] Add stubs to cnvpytor module (#1473) * callcnvs stub and tests * partition stub and test * histogram stub and test * importreaddepth stub and tests * update module scripts Co-authored-by: Sima Rahimi --- modules/cnvpytor/callcnvs/main.nf | 11 ++++++ modules/cnvpytor/histogram/main.nf | 10 +++++ modules/cnvpytor/importreaddepth/main.nf | 11 ++++++ modules/cnvpytor/partition/main.nf | 12 +++++- tests/modules/cnvpytor/callcnvs/test.yml | 14 ++++++- tests/modules/cnvpytor/histogram/test.yml | 12 +++++- .../cnvpytor/importreaddepth/nextflow.config | 2 +- .../modules/cnvpytor/importreaddepth/test.yml | 39 
+++++++++++++++++++ tests/modules/cnvpytor/partition/test.yml | 12 +++++- 9 files changed, 117 insertions(+), 6 deletions(-) create mode 100644 tests/modules/cnvpytor/importreaddepth/test.yml diff --git a/modules/cnvpytor/callcnvs/main.nf b/modules/cnvpytor/callcnvs/main.nf index e296656b..17675cde 100644 --- a/modules/cnvpytor/callcnvs/main.nf +++ b/modules/cnvpytor/callcnvs/main.nf @@ -30,4 +30,15 @@ process CNVPYTOR_CALLCNVS { cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ } diff --git a/modules/cnvpytor/histogram/main.nf b/modules/cnvpytor/histogram/main.nf index e421f1b2..d1c6856c 100644 --- a/modules/cnvpytor/histogram/main.nf +++ b/modules/cnvpytor/histogram/main.nf @@ -29,4 +29,14 @@ process CNVPYTOR_HISTOGRAM { cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) END_VERSIONS """ + + stub: + """ + touch test.pytor + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ } diff --git a/modules/cnvpytor/importreaddepth/main.nf b/modules/cnvpytor/importreaddepth/main.nf index 1b037629..162da719 100644 --- a/modules/cnvpytor/importreaddepth/main.nf +++ b/modules/cnvpytor/importreaddepth/main.nf @@ -35,4 +35,15 @@ process CNVPYTOR_IMPORTREADDEPTH { cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.pytor + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ } diff --git a/modules/cnvpytor/partition/main.nf b/modules/cnvpytor/partition/main.nf index 74ab4026..975458bf 100644 --- a/modules/cnvpytor/partition/main.nf +++ b/modules/cnvpytor/partition/main.nf @@ -18,7 +18,7 @@ process CNVPYTOR_PARTITION { task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '1000' + def args = task.ext.args ?: '' """ cnvpytor \\ -root $pytor \\ @@ -29,4 +29,14 @@ process CNVPYTOR_PARTITION { cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) END_VERSIONS """ + + stub: + """ + touch test.pytor + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + cnvpytor: \$(echo \$(cnvpytor --version 2>&1) | sed 's/^.*pyCNVnator //; s/Using.*\$//' )) + END_VERSIONS + """ } diff --git a/tests/modules/cnvpytor/callcnvs/test.yml b/tests/modules/cnvpytor/callcnvs/test.yml index 85bfcc7a..4565151e 100644 --- a/tests/modules/cnvpytor/callcnvs/test.yml +++ b/tests/modules/cnvpytor/callcnvs/test.yml @@ -4,7 +4,17 @@ - cnvpytor - cnvpytor/callcnvs files: - - path: output/cnvpytor/calls.10000.tsv + - path: output/cnvpytor/test.tsv md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/cnvpytor/versions.yml - md5sum: 5fe6ca3ef5c40f9dbf487f28db237821 + md5sum: 0bea08a253fcb2ff0ff79b99df77b9fa + +- name: cnvpytor callcnvs test_cnvpytor_callcnvs stub + command: nextflow run tests/modules/cnvpytor/callcnvs -entry test_cnvpytor_callcnvs -c tests/config/nextflow.config -stub-run + tags: + - cnvpytor + - cnvpytor/callcnvs + files: + - path: 
output/cnvpytor/test.tsv + - path: output/cnvpytor/versions.yml + md5sum: 0bea08a253fcb2ff0ff79b99df77b9fa diff --git a/tests/modules/cnvpytor/histogram/test.yml b/tests/modules/cnvpytor/histogram/test.yml index fd8bcaf4..0543fcc3 100644 --- a/tests/modules/cnvpytor/histogram/test.yml +++ b/tests/modules/cnvpytor/histogram/test.yml @@ -7,4 +7,14 @@ - path: output/cnvpytor/test.pytor md5sum: aa03a8fa15b39f77816705a48e10312a - path: output/cnvpytor/versions.yml - md5sum: 9a4b176afd5f1a3edeb37eeb301cf464 + md5sum: 0f4d75c4f3a3eb26c22616d12b0b78b2 + +- name: cnvpytor histogram test_cnvpytor_histogram stub + command: nextflow run tests/modules/cnvpytor/histogram -entry test_cnvpytor_histogram -c tests/config/nextflow.config -stub-run + tags: + - cnvpytor + - cnvpytor/histogram + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: 0f4d75c4f3a3eb26c22616d12b0b78b2 diff --git a/tests/modules/cnvpytor/importreaddepth/nextflow.config b/tests/modules/cnvpytor/importreaddepth/nextflow.config index c60f979e..4383e375 100644 --- a/tests/modules/cnvpytor/importreaddepth/nextflow.config +++ b/tests/modules/cnvpytor/importreaddepth/nextflow.config @@ -8,5 +8,5 @@ process { } params { - cnvpytor_chr = '' // specifies chromosome name(s) the same way as they are described in the sam/bam/cram header e.g. '1 2' or 'chr1 chr2'. + cnvpytor_chr = null // specifies chromosome name(s) the same way as they are described in the sam/bam/cram header e.g. '1 2' or 'chr1 chr2'. } diff --git a/tests/modules/cnvpytor/importreaddepth/test.yml b/tests/modules/cnvpytor/importreaddepth/test.yml new file mode 100644 index 00000000..b148c38e --- /dev/null +++ b/tests/modules/cnvpytor/importreaddepth/test.yml @@ -0,0 +1,39 @@ +- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth + command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth -c tests/config/nextflow.config + tags: + - cnvpytor + - cnvpytor/importreaddepth + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: 5834495324c08a37f3fd73ccdd881dc8 + +- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth stub + command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth -c tests/config/nextflow.config -stub-run + tags: + - cnvpytor + - cnvpytor/importreaddepth + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: 5834495324c08a37f3fd73ccdd881dc8 + +- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth_cram + command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth_cram -c tests/config/nextflow.config + tags: + - cnvpytor + - cnvpytor/importreaddepth + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: dfa0afb0982d985b96d1633f71ebb82a + +- name: cnvpytor importreaddepth test_cnvpytor_importreaddepth_cram stub + command: nextflow run tests/modules/cnvpytor/importreaddepth -entry test_cnvpytor_importreaddepth_cram -c tests/config/nextflow.config -stub-run + tags: + - cnvpytor + - cnvpytor/importreaddepth + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: dfa0afb0982d985b96d1633f71ebb82a diff --git a/tests/modules/cnvpytor/partition/test.yml b/tests/modules/cnvpytor/partition/test.yml index 1b838395..10232097 100644 --- a/tests/modules/cnvpytor/partition/test.yml +++ b/tests/modules/cnvpytor/partition/test.yml @@ -7,4 +7,14 @@ - 
path: output/cnvpytor/test.pytor md5sum: aa03a8fa15b39f77816705a48e10312a - path: output/cnvpytor/versions.yml - md5sum: 8a04506554c58cd170cc050fd9904c6f + md5sum: 7fd6ec952a316463bcd324f176b46b64 + +- name: cnvpytor partition test_cnvpytor_partition stub + command: nextflow run tests/modules/cnvpytor/partition -entry test_cnvpytor_partition -c tests/config/nextflow.config -stub-run + tags: + - cnvpytor + - cnvpytor/partition + files: + - path: output/cnvpytor/test.pytor + - path: output/cnvpytor/versions.yml + md5sum: 7fd6ec952a316463bcd324f176b46b64 From f1c5384c31e985591716afdd732cf8c2ae29d05b Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Sun, 3 Apr 2022 16:06:22 +0200 Subject: [PATCH 091/283] Add PRINSEQPLUSPLUS (#1481) * fix: remove left-over unnecessary code * Add prinseq++ * Remove last todo * Fix tests due to variability of output FASTQs (reads can be ordered differently between runs) * Apply suggestions from code review --- modules/prinseqplusplus/main.nf | 61 +++++++++++++++++++ modules/prinseqplusplus/meta.yml | 60 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/prinseqplusplus/main.nf | 24 ++++++++ tests/modules/prinseqplusplus/nextflow.config | 9 +++ tests/modules/prinseqplusplus/test.yml | 27 ++++++++ 6 files changed, 185 insertions(+) create mode 100644 modules/prinseqplusplus/main.nf create mode 100644 modules/prinseqplusplus/meta.yml create mode 100644 tests/modules/prinseqplusplus/main.nf create mode 100644 tests/modules/prinseqplusplus/nextflow.config create mode 100644 tests/modules/prinseqplusplus/test.yml diff --git a/modules/prinseqplusplus/main.nf b/modules/prinseqplusplus/main.nf new file mode 100644 index 00000000..ebd8c58c --- /dev/null +++ b/modules/prinseqplusplus/main.nf @@ -0,0 +1,61 @@ +process PRINSEQPLUSPLUS { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::prinseq-plus-plus=1.2.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/prinseq-plus-plus:1.2.3--hc90279e_1': + 'quay.io/biocontainers/prinseq-plus-plus:1.2.3--hc90279e_1' }" + + input: + tuple val(meta), path(reads) + + output: + tuple val(meta), path("*_good_out*.fastq.gz") , emit: good_reads + tuple val(meta), path("*_single_out*.fastq.gz"), optional: true, emit: single_reads + tuple val(meta), path("*_bad_out*.fastq.gz") , optional: true, emit: bad_reads + tuple val(meta), path("*.log") , emit: log + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + if (meta.single_end) { + """ + prinseq++ \\ + -threads $task.cpus \\ + -fastq ${reads} \\ + -out_name ${prefix} \\ + -out_gz \\ + -VERBOSE 1 \\ + $args \\ + | tee ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + prinseqplusplus: \$(echo \$(prinseq++ --version | cut -f 2 -d ' ' )) + END_VERSIONS + """ + } else { + """ + prinseq++ \\ + -threads $task.cpus \\ + -fastq ${reads[0]} \\ + -fastq2 ${reads[1]} \\ + -out_name ${prefix} \\ + -out_gz \\ + -VERBOSE 1 \\ + $args \\ + | tee ${prefix}.log + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + prinseqplusplus: \$(echo \$(prinseq++ --version | cut -f 2 -d ' ' )) + END_VERSIONS + """ + } +} diff --git a/modules/prinseqplusplus/meta.yml b/modules/prinseqplusplus/meta.yml new file mode 100644 index 00000000..8155df93 --- /dev/null +++ b/modules/prinseqplusplus/meta.yml @@ -0,0 +1,60 @@ +name: "prinseqplusplus" +description: PRINSEQ++ is a C++ implementation of the prinseq-lite.pl program. It can be used to filter, reformat or trim genomic and metagenomic sequence data +keywords: + - fastq + - fasta + - filter + - trim +tools: + - "prinseqplusplus": + description: "PRINSEQ++ - Multi-threaded C++ sequence cleaning" + homepage: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus" + documentation: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus" + tool_dev_url: "https://github.com/Adrian-Cantu/PRINSEQ-plus-plus" + doi: "10.7287/peerj.preprints.27553v1" + licence: "['GPL v2']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: | + List of input FastQ files of size 1 and 2 for single-end and paired-end + data, respectively. + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - good_reads: + type: file + description: Reads passing filter(s) in gzipped FASTQ format + pattern: "*_good_out_{R1,R2}.fastq.gz" + - single_reads: + type: file + description: | + Single reads without the pair passing filter(s) in gzipped FASTQ format + pattern: "*_single_out_{R1,R2}.fastq.gz" + - bad_reads: + type: file + description: | + Reads without not passing filter(s) in gzipped FASTQ format + pattern: "*_bad_out_{R1,R2}.fastq.gz" + - log: + type: file + description: | + Verbose level 2 STDOUT information in a log file + pattern: "*.log" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 24bfe641..6d66f230 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1407,6 +1407,10 @@ preseq/lcextrap: - modules/preseq/lcextrap/** - tests/modules/preseq/lcextrap/** +prinseqplusplus: + - modules/prinseqplusplus/** + - tests/modules/prinseqplusplus/** + prodigal: - modules/prodigal/** - tests/modules/prodigal/** diff --git a/tests/modules/prinseqplusplus/main.nf b/tests/modules/prinseqplusplus/main.nf new file mode 100644 index 00000000..d6ee3be9 --- /dev/null +++ b/tests/modules/prinseqplusplus/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PRINSEQPLUSPLUS } from '../../../modules/prinseqplusplus/main.nf' + +workflow test_prinseqplusplus_single_end { + + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + + PRINSEQPLUSPLUS ( input ) +} + +workflow test_prinseqplusplus_paired_end { + + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + + PRINSEQPLUSPLUS ( input ) +} diff --git a/tests/modules/prinseqplusplus/nextflow.config b/tests/modules/prinseqplusplus/nextflow.config new file mode 100644 index 00000000..032e5713 --- /dev/null +++ b/tests/modules/prinseqplusplus/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: PRINSEQPLUSPLUS { + ext.args = "-lc_entropy=0.8" + } + +} diff --git a/tests/modules/prinseqplusplus/test.yml b/tests/modules/prinseqplusplus/test.yml new file mode 100644 index 00000000..e034febb --- /dev/null +++ b/tests/modules/prinseqplusplus/test.yml @@ -0,0 +1,27 @@ +- name: prinseqplusplus test_prinseqplusplus_single_end + command: nextflow run tests/modules/prinseqplusplus -entry test_prinseqplusplus_single_end -c tests/config/nextflow.config + tags: + - prinseqplusplus + files: + - path: output/prinseqplusplus/test.log + contains: + - "reads removed by -lc_entropy" + - path: output/prinseqplusplus/test_bad_out.fastq.gz + - path: output/prinseqplusplus/test_good_out.fastq.gz + - path: output/prinseqplusplus/versions.yml + +- name: prinseqplusplus test_prinseqplusplus_paired_end + command: nextflow run tests/modules/prinseqplusplus -entry test_prinseqplusplus_paired_end -c tests/config/nextflow.config + tags: + - prinseqplusplus + files: + - path: output/prinseqplusplus/test.log + contains: + - "reads removed by -lc_entropy" + - path: output/prinseqplusplus/test_bad_out_R1.fastq.gz + - path: 
output/prinseqplusplus/test_bad_out_R2.fastq.gz + - path: output/prinseqplusplus/test_good_out_R1.fastq.gz + - path: output/prinseqplusplus/test_good_out_R2.fastq.gz + - path: output/prinseqplusplus/test_single_out_R1.fastq.gz + - path: output/prinseqplusplus/test_single_out_R2.fastq.gz + - path: output/prinseqplusplus/versions.yml From 6a11c5a2226436f5543d582b81835819d0767637 Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Mon, 4 Apr 2022 10:18:11 +0200 Subject: [PATCH 092/283] Fix untar for centrifuge (#1472) --- modules/centrifuge/main.nf | 4 ++-- modules/centrifuge/meta.yml | 3 +++ tests/modules/centrifuge/main.nf | 13 +++++++++---- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/modules/centrifuge/main.nf b/modules/centrifuge/main.nf index 7eb566da..c9ec377b 100644 --- a/modules/centrifuge/main.nf +++ b/modules/centrifuge/main.nf @@ -10,6 +10,7 @@ process CENTRIFUGE { input: tuple val(meta), path(reads) path db + val db_name val save_unaligned val save_aligned val sam_format @@ -42,9 +43,8 @@ process CENTRIFUGE { } def sam_output = sam_format ? "--out-fmt 'sam'" : '' """ - tar -xf $db centrifuge \\ - -x $db_name \\ + -x ${db}/${db_name} \\ -p $task.cpus \\ $paired \\ --report-file ${prefix}.report.txt \\ diff --git a/modules/centrifuge/meta.yml b/modules/centrifuge/meta.yml index 3adf0e23..aabb465f 100644 --- a/modules/centrifuge/meta.yml +++ b/modules/centrifuge/meta.yml @@ -27,6 +27,9 @@ input: type: directory description: Centrifuge database in .tar.gz format pattern: "*.tar.gz" + - db_name: + type: string + description: Centrifuge database filenames without the suffix ".cf" - save_unaligned: type: value description: If true unmapped fastq files are saved diff --git a/tests/modules/centrifuge/main.nf b/tests/modules/centrifuge/main.nf index a8eb2fcb..37393ce5 100644 --- a/tests/modules/centrifuge/main.nf +++ b/tests/modules/centrifuge/main.nf @@ -2,18 +2,21 @@ nextflow.enable.dsl = 2 +include { UNTAR } from '../../../modules/untar/main.nf' include { CENTRIFUGE } from '../../../modules/centrifuge/main.nf' workflow test_centrifuge_single_end { input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] - db = file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz", checkIfExists: true) + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] + db_name = "minigut_cf" save_unaligned = true save_aligned = false sam_format = false - CENTRIFUGE ( input, db, save_unaligned, save_aligned, sam_format ) + UNTAR ( db ) + CENTRIFUGE ( input, UNTAR.out.untar.map{ it[1] },db_name, save_unaligned, save_aligned, sam_format ) } @@ -22,12 +25,14 @@ workflow test_centrifuge_paired_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - db = file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz", checkIfExists: true) + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] + db_name = "minigut_cf" save_unaligned = true save_aligned = false sam_format = false - CENTRIFUGE ( input, db, save_unaligned, save_aligned, sam_format ) + UNTAR ( db ) + CENTRIFUGE ( input, 
UNTAR.out.untar.map{ it[1] }, db_name, save_unaligned, save_aligned, sam_format ) } From cb54d1ebd77de5b482cae89ed9e51fa6ef97d3ee Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Mon, 4 Apr 2022 13:18:02 +0200 Subject: [PATCH 093/283] update stubs (#1488) --- modules/cnvpytor/histogram/main.nf | 2 +- modules/cnvpytor/partition/main.nf | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/cnvpytor/histogram/main.nf b/modules/cnvpytor/histogram/main.nf index d1c6856c..9e59c6b8 100644 --- a/modules/cnvpytor/histogram/main.nf +++ b/modules/cnvpytor/histogram/main.nf @@ -32,7 +32,7 @@ process CNVPYTOR_HISTOGRAM { stub: """ - touch test.pytor + touch ${pytor.baseName}.pytor cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/cnvpytor/partition/main.nf b/modules/cnvpytor/partition/main.nf index 975458bf..0311bdfc 100644 --- a/modules/cnvpytor/partition/main.nf +++ b/modules/cnvpytor/partition/main.nf @@ -32,7 +32,7 @@ process CNVPYTOR_PARTITION { stub: """ - touch test.pytor + touch ${pytor.baseName}.pytor cat <<-END_VERSIONS > versions.yml "${task.process}": From ae48653bd2d169510580220bb62d96f830c31293 Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 4 Apr 2022 13:31:36 +0200 Subject: [PATCH 094/283] DASTool output channels update (#1489) * fix: remove left-over unnecessary code * Make summary output optional as not generated if no sufficiently HQ bins found * Make contig2bin optional as only generated if sufficient HQ bins found --- modules/dastool/dastool/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/dastool/dastool/main.nf b/modules/dastool/dastool/main.nf index 968f85de..a7d9c6f6 100644 --- a/modules/dastool/dastool/main.nf +++ b/modules/dastool/dastool/main.nf @@ -14,8 +14,8 @@ process DASTOOL_DASTOOL { output: tuple val(meta), path("*.log") , emit: log - tuple val(meta), path("*_summary.tsv") , emit: summary - tuple val(meta), path("*_DASTool_contig2bin.tsv") , emit: contig2bin + tuple val(meta), path("*_summary.tsv") , optional: true, emit: summary + tuple val(meta), path("*_DASTool_contig2bin.tsv") , optional: true, emit: contig2bin tuple val(meta), path("*.eval") , optional: true, emit: eval tuple val(meta), path("*_DASTool_bins/*.fa") , optional: true, emit: bins tuple val(meta), path("*.pdf") , optional: true, emit: pdfs From 13cc32399cdaa866092b1bbc6e8a982d51c455db Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Mon, 4 Apr 2022 13:21:37 -0500 Subject: [PATCH 095/283] feat(homer): Add groseq subworkflow (#1492) * feat(homer): Add groseq subworkflow * fix(homer): Update groseq paths * test(homer): Update groseq bam md5sums * test(homer): Update bed process args wildcard * test(homer): Update groseq bed md5s * style: Run prettier * style(homer): Align comments Co-authored-by: Friederike Hanssen * docs(homer): Add groseq meta.yml Co-authored-by: Friederike Hanssen --- subworkflows/nf-core/homer/groseq/main.nf | 50 +++++++++++++++++++ subworkflows/nf-core/homer/groseq/meta.yml | 48 ++++++++++++++++++ .../subworkflows/nf-core/homer/groseq/main.nf | 24 +++++++++ .../nf-core/homer/groseq/nextflow.config | 9 ++++ .../nf-core/homer/groseq/test.yml | 27 ++++++++++ 5 files changed, 158 insertions(+) create mode 100644 subworkflows/nf-core/homer/groseq/main.nf create mode 100644 subworkflows/nf-core/homer/groseq/meta.yml create mode 100644 tests/subworkflows/nf-core/homer/groseq/main.nf create mode 100644 
tests/subworkflows/nf-core/homer/groseq/nextflow.config create mode 100644 tests/subworkflows/nf-core/homer/groseq/test.yml diff --git a/subworkflows/nf-core/homer/groseq/main.nf b/subworkflows/nf-core/homer/groseq/main.nf new file mode 100644 index 00000000..b83c7e21 --- /dev/null +++ b/subworkflows/nf-core/homer/groseq/main.nf @@ -0,0 +1,50 @@ +/* + * Identify transcripts with homer + */ + +include { HOMER_MAKETAGDIRECTORY } from '../../../../modules/homer/maketagdirectory/main' +include { HOMER_MAKEUCSCFILE } from '../../../../modules/homer/makeucscfile/main' +include { HOMER_FINDPEAKS } from '../../../../modules/homer/findpeaks/main' +include { HOMER_POS2BED } from '../../../../modules/homer/pos2bed/main' + +workflow HOMER_GROSEQ { + take: + bam // channel: [ val(meta), [ reads ] ] + fasta // file: /path/to/bwa/index/ + + main: + + ch_versions = Channel.empty() + + /* + * Create a Tag Directory From The GRO-Seq experiment + */ + HOMER_MAKETAGDIRECTORY ( bam, fasta ) + ch_versions = ch_versions.mix(HOMER_MAKETAGDIRECTORY.out.versions.first()) + + /* + * Creating UCSC Visualization Files + */ + HOMER_MAKEUCSCFILE ( HOMER_MAKETAGDIRECTORY.out.tagdir ) + ch_versions = ch_versions.mix(HOMER_MAKEUCSCFILE.out.versions.first()) + + /* + * Find transcripts directly from GRO-Seq + */ + HOMER_FINDPEAKS ( HOMER_MAKETAGDIRECTORY.out.tagdir ) + ch_versions = ch_versions.mix(HOMER_FINDPEAKS.out.versions.first()) + + /* + * Convert peak file to bed file + */ + HOMER_POS2BED ( HOMER_FINDPEAKS.out.txt ) + ch_versions = ch_versions.mix(HOMER_POS2BED.out.versions.first()) + + emit: + tagdir = HOMER_MAKETAGDIRECTORY.out.tagdir // channel: [ val(meta), [ tagdir ] ] + bed_graph = HOMER_MAKEUCSCFILE.out.bedGraph // channel: [ val(meta), [ tag_dir/*ucsc.bedGraph.gz ] ] + peaks = HOMER_FINDPEAKS.out.txt // channel: [ val(meta), [ *peaks.txt ] ] + bed = HOMER_POS2BED.out.bed // channel: [ val(meta), [ *peaks.txt ] ] + + versions = ch_versions // channel: [ versions.yml ] +} diff --git a/subworkflows/nf-core/homer/groseq/meta.yml b/subworkflows/nf-core/homer/groseq/meta.yml new file mode 100644 index 00000000..4bd36a88 --- /dev/null +++ b/subworkflows/nf-core/homer/groseq/meta.yml @@ -0,0 +1,48 @@ +name: homer_groseq +description: Perform variant calling on a set of normal samples using mutect2 panel of normals mode. Group them into a genomicsdbworkspace using genomicsdbimport, then use this to create a panel of normals using createsomaticpanelofnormals. +keywords: + - homer + - groseq + - nascent +modules: + - homer/maketagdirectory + - homer/makeucscfile + - homer/findpeaks + - homer/pos2bed +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - input: + type: list + description: list of BAM files, also able to take SAM and BED as input + pattern: "[ *.{bam/sam/bed} ]" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" +output: + - tagdir: + type: directory + description: The "Tag Directory" + pattern: "*_tagdir" + - bedGraph: + type: file + description: The UCSC bed graph + pattern: "*.bedGraph.gz" + - peaks: + type: file + description: The found peaks + pattern: "*.peaks.txt" + - bed: + type: file + description: A BED file of the found peaks + pattern: "*.bed" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@Emiller88" diff --git a/tests/subworkflows/nf-core/homer/groseq/main.nf b/tests/subworkflows/nf-core/homer/groseq/main.nf new file mode 100644 index 00000000..72b95e87 --- /dev/null +++ b/tests/subworkflows/nf-core/homer/groseq/main.nf @@ -0,0 +1,24 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { HOMER_GROSEQ as HOMER_GROSEQ_BAM + HOMER_GROSEQ as HOMER_GROSEQ_BED } from '../../../../../subworkflows/nf-core/homer/groseq/main' + +workflow test_homer_groseq_bam { + def input = [] + input = [[ id: 'test' ], + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)]] + def fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + HOMER_GROSEQ_BAM ( input, fasta ) +} + +workflow test_homer_groseq_bed { + def input = [] + input = [[ id: 'test' ], + [ file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true)]] + def fasta = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + + HOMER_GROSEQ_BED ( input, fasta ) +} diff --git a/tests/subworkflows/nf-core/homer/groseq/nextflow.config b/tests/subworkflows/nf-core/homer/groseq/nextflow.config new file mode 100644 index 00000000..09a44497 --- /dev/null +++ b/tests/subworkflows/nf-core/homer/groseq/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: '.*:HOMER_GROSEQ_BED:HOMER_MAKETAGDIRECTORY' { + ext.args = "-checkGC -format bed" + } + +} diff --git a/tests/subworkflows/nf-core/homer/groseq/test.yml b/tests/subworkflows/nf-core/homer/groseq/test.yml new file mode 100644 index 00000000..1eaeb3f5 --- /dev/null +++ b/tests/subworkflows/nf-core/homer/groseq/test.yml @@ -0,0 +1,27 @@ +- name: subworkflow homer_groseq bam + command: nextflow run ./tests/subworkflows/nf-core/homer/groseq/ -entry test_homer_groseq_bam -c tests/config/nextflow.config -c tests/subworkflows/nf-core/homer/groseq/nextflow.config + tags: + - homer + files: + - path: output/homer/test.bed + md5sum: 8d40034dfe22c5cf973071aa1e8d3617 + - path: output/homer/test.bedGraph.gz + md5sum: de2b2f8ab90a909b8bfbe755bdaba407 + - path: output/homer/test.peaks.txt + md5sum: 8d40034dfe22c5cf973071aa1e8d3617 + - path: output/homer/versions.yml + md5sum: c85dee03f1afabe406a87743a4c5506d + +- name: subworkflow homer_groseq bed + command: nextflow run ./tests/subworkflows/nf-core/homer/groseq/ -entry test_homer_groseq_bed -c tests/config/nextflow.config -c tests/subworkflows/nf-core/homer/groseq/nextflow.config + tags: + - homer + files: + - path: output/homer/test.bed + md5sum: 25e8b64946012d1c4567a04062e90fae + - path: output/homer/test.bedGraph.gz + md5sum: 2d2d1c2d3242ff74c7a922695accb9d2 + - path: output/homer/test.peaks.txt + md5sum: 
25e8b64946012d1c4567a04062e90fae + - path: output/homer/versions.yml + md5sum: c9b5f1248d28c216b000cba8da738455 From 879d42c5e28661fe0a5e744c9e2c515868f9e08a Mon Sep 17 00:00:00 2001 From: "Moritz E. Beber" Date: Mon, 4 Apr 2022 21:40:35 +0200 Subject: [PATCH 096/283] Refactor adapterremoval (#1491) * refactor: insert .fastq file extensions * style: insert whitespace * refactor: create paired output * refactor: rename settings from log Requested by @jfy133 * tests: correct expected output * fix: remove settings option due to default * chore: rename output patterns * refactor: omit paired files in single-end * refactor: rename output to settings --- modules/adapterremoval/main.nf | 44 ++++++++++++++++++++------- modules/adapterremoval/meta.yml | 14 ++++----- tests/modules/adapterremoval/test.yml | 26 ++++++++-------- 3 files changed, 53 insertions(+), 31 deletions(-) diff --git a/modules/adapterremoval/main.nf b/modules/adapterremoval/main.nf index 9d16b9c9..0e17c055 100644 --- a/modules/adapterremoval/main.nf +++ b/modules/adapterremoval/main.nf @@ -12,15 +12,14 @@ process ADAPTERREMOVAL { path(adapterlist) output: - tuple val(meta), path("${prefix}.truncated.gz") , optional: true, emit: singles_truncated - tuple val(meta), path("${prefix}.discarded.gz") , optional: true, emit: discarded - tuple val(meta), path("${prefix}.pair1.truncated.gz") , optional: true, emit: pair1_truncated - tuple val(meta), path("${prefix}.pair2.truncated.gz") , optional: true, emit: pair2_truncated - tuple val(meta), path("${prefix}.collapsed.gz") , optional: true, emit: collapsed - tuple val(meta), path("${prefix}.collapsed.truncated.gz") , optional: true, emit: collapsed_truncated - tuple val(meta), path("${prefix}.paired.gz") , optional: true, emit: paired_interleaved - tuple val(meta), path('*.log') , emit: log - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}.truncated.fastq.gz") , optional: true, emit: singles_truncated + tuple val(meta), path("${prefix}.discarded.fastq.gz") , optional: true, emit: discarded + tuple val(meta), path("${prefix}.pair{1,2}.truncated.fastq.gz") , optional: true, emit: paired_truncated + tuple val(meta), path("${prefix}.collapsed.fastq.gz") , optional: true, emit: collapsed + tuple val(meta), path("${prefix}.collapsed.truncated.fastq.gz") , optional: true, emit: collapsed_truncated + tuple val(meta), path("${prefix}.paired.fastq.gz") , optional: true, emit: paired_interleaved + tuple val(meta), path('*.settings') , emit: settings + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -38,10 +37,19 @@ process ADAPTERREMOVAL { $adapterlist \\ --basename ${prefix} \\ --threads ${task.cpus} \\ - --settings ${prefix}.log \\ --seed 42 \\ --gzip + ensure_fastq() { + if [ -f "\${1}" ]; then + mv "\${1}" "\${1::-3}.fastq.gz" + fi + + } + + ensure_fastq '${prefix}.truncated.gz' + ensure_fastq '${prefix}.discarded.gz' + cat <<-END_VERSIONS > versions.yml "${task.process}": adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. 
//g") @@ -56,10 +64,24 @@ process ADAPTERREMOVAL { $adapterlist \\ --basename ${prefix} \\ --threads $task.cpus \\ - --settings ${prefix}.log \\ --seed 42 \\ --gzip + ensure_fastq() { + if [ -f "\${1}" ]; then + mv "\${1}" "\${1::-3}.fastq.gz" + fi + + } + + ensure_fastq '${prefix}.truncated.gz' + ensure_fastq '${prefix}.discarded.gz' + ensure_fastq '${prefix}.pair1.truncated.gz' + ensure_fastq '${prefix}.pair2.truncated.gz' + ensure_fastq '${prefix}.collapsed.gz' + ensure_fastq '${prefix}.collapsed.truncated.gz' + ensure_fastq '${prefix}.paired.gz' + cat <<-END_VERSIONS > versions.yml "${task.process}": adapterremoval: \$(AdapterRemoval --version 2>&1 | sed -e "s/AdapterRemoval ver. //g") diff --git a/modules/adapterremoval/meta.yml b/modules/adapterremoval/meta.yml index 5faad043..77273f60 100644 --- a/modules/adapterremoval/meta.yml +++ b/modules/adapterremoval/meta.yml @@ -43,43 +43,43 @@ output: Adapter trimmed FastQ files of either single-end reads, or singleton 'orphaned' reads from merging of paired-end data (i.e., one of the pair was lost due to filtering thresholds). - pattern: "*.truncated.gz" + pattern: "*.truncated.fastq.gz" - discarded: type: file description: | Adapter trimmed FastQ files of reads that did not pass filtering thresholds. - pattern: "*.discarded.gz" + pattern: "*.discarded.fastq.gz" - pair1_truncated: type: file description: | Adapter trimmed R1 FastQ files of paired-end reads that did not merge with their respective R2 pair due to long templates. The respective pair is stored in 'pair2_truncated'. - pattern: "*.pair1.truncated.gz" + pattern: "*.pair1.truncated.fastq.gz" - pair2_truncated: type: file description: | Adapter trimmed R2 FastQ files of paired-end reads that did not merge with their respective R1 pair due to long templates. The respective pair is stored in 'pair1_truncated'. - pattern: "*.pair2.truncated.gz" + pattern: "*.pair2.truncated.fastq.gz" - collapsed: type: file description: | Collapsed FastQ of paired-end reads that successfully merged with their respective R1 pair but were not trimmed. - pattern: "*.collapsed.gz" + pattern: "*.collapsed.fastq.gz" - collapsed_truncated: type: file description: | Collapsed FastQ of paired-end reads that successfully merged with their respective R1 pair and were trimmed of adapter due to sufficient overlap. 
- pattern: "*.collapsed.truncated.gz" + pattern: "*.collapsed.truncated.fastq.gz" - log: type: file description: AdapterRemoval log file - pattern: "*.log" + pattern: "*.settings" - versions: type: file description: File containing software versions diff --git a/tests/modules/adapterremoval/test.yml b/tests/modules/adapterremoval/test.yml index f6adfba3..e660da76 100644 --- a/tests/modules/adapterremoval/test.yml +++ b/tests/modules/adapterremoval/test.yml @@ -3,10 +3,10 @@ tags: - adapterremoval files: - - path: output/adapterremoval/test.discarded.gz - - path: output/adapterremoval/test.log + - path: output/adapterremoval/test.discarded.fastq.gz + - path: output/adapterremoval/test.settings md5sum: 2fd3d5d703b63ba33a83021fccf25f77 - - path: output/adapterremoval/test.truncated.gz + - path: output/adapterremoval/test.truncated.fastq.gz md5sum: 62139afee94defad5b83bdd0b8475a1f - path: output/adapterremoval/versions.yml md5sum: ac5b46719719b7ee62739530b80869fc @@ -16,12 +16,12 @@ tags: - adapterremoval files: - - path: output/adapterremoval/test.discarded.gz - - path: output/adapterremoval/test.log + - path: output/adapterremoval/test.discarded.fastq.gz + - path: output/adapterremoval/test.settings md5sum: b8a451d3981b327f3fdb44f40ba2d6d1 - - path: output/adapterremoval/test.pair1.truncated.gz + - path: output/adapterremoval/test.pair1.truncated.fastq.gz md5sum: 294a6277f0139bd597e57c6fa31f39c7 - - path: output/adapterremoval/test.pair2.truncated.gz + - path: output/adapterremoval/test.pair2.truncated.fastq.gz md5sum: de7b38e2c881bced8671acb1ab452d78 - path: output/adapterremoval/versions.yml md5sum: fa621c887897da5a379c719399c17db7 @@ -31,15 +31,15 @@ tags: - adapterremoval files: - - path: output/adapterremoval/test.collapsed.gz + - path: output/adapterremoval/test.collapsed.fastq.gz md5sum: ff956de3532599a56c3efe5369f0953f - - path: output/adapterremoval/test.collapsed.truncated.gz - - path: output/adapterremoval/test.discarded.gz - - path: output/adapterremoval/test.log + - path: output/adapterremoval/test.collapsed.truncated.fastq.gz + - path: output/adapterremoval/test.discarded.fastq.gz + - path: output/adapterremoval/test.settings md5sum: 7f0b2328152226e46101a535cce718b3 - - path: output/adapterremoval/test.pair1.truncated.gz + - path: output/adapterremoval/test.pair1.truncated.fastq.gz md5sum: 683be19bc1c83008944b6b719bfa34e1 - - path: output/adapterremoval/test.pair2.truncated.gz + - path: output/adapterremoval/test.pair2.truncated.fastq.gz md5sum: e6548fe061f3ef86368b26da930174d0 - path: output/adapterremoval/versions.yml md5sum: 78f589bb313c8da0147ca8ce77d7f3bf From 797ce3254e1868b224ec5c2742418876af254c35 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 5 Apr 2022 11:06:46 +0200 Subject: [PATCH 097/283] Update: biobambam/bammarkduplicates2 to v2.0.183 (#1493) * bump version, remove md5sums from test * re-add md5sums --- modules/biobambam/bammarkduplicates2/main.nf | 6 ++---- tests/modules/biobambam/bammarkduplicates2/test.yml | 4 ++-- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/modules/biobambam/bammarkduplicates2/main.nf b/modules/biobambam/bammarkduplicates2/main.nf index a93e55b5..dd0e55b6 100644 --- a/modules/biobambam/bammarkduplicates2/main.nf +++ b/modules/biobambam/bammarkduplicates2/main.nf @@ -2,10 +2,8 @@ process BIOBAMBAM_BAMMARKDUPLICATES2 { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 
"bioconda::biobambam=2.0.182" : null) - container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/biobambam:2.0.182--h7d875b9_0': - 'quay.io/biocontainers/biobambam:2.0.182--h7d875b9_0' }" + conda (params.enable_conda ? "bioconda::biobambam=2.0.183" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/biobambam:2.0.183--h9f5acd7_1' : 'quay.io/biocontainers/biobambam:2.0.183--h9f5acd7_1'}" input: tuple val(meta), path(bam) diff --git a/tests/modules/biobambam/bammarkduplicates2/test.yml b/tests/modules/biobambam/bammarkduplicates2/test.yml index d046dfe9..7c16fcf1 100644 --- a/tests/modules/biobambam/bammarkduplicates2/test.yml +++ b/tests/modules/biobambam/bammarkduplicates2/test.yml @@ -5,8 +5,8 @@ - biobambam files: - path: output/biobambam/test.bam - md5sum: 1cf7f957eb20b4ace9f10d0cf0a0649a + md5sum: 603edff09029096ddf2bb8a3f12d7aa7 - path: output/biobambam/test.metrics.txt md5sum: 30d6e7d90bb5df46329d4bc0144ce927 - path: output/biobambam/versions.yml - md5sum: 0d6f3137ed4515333d73c779f2c24445 + md5sum: dfdf2b084655d124acac0bfb4eda86cc From dc95e67e153ad937b869d90229333c0654628912 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Wed, 6 Apr 2022 08:18:23 +0200 Subject: [PATCH 098/283] New tool: biobambam/bamsormadup (#1478) * add bamsormadup * fix yaml * add test.yml * Update tests/modules/biobambam/bamsormadup/test.yml Co-authored-by: James A. Fellows Yates * test meta.yaml: remove md5sums * Tool bamsormadup: - add (optional) reference input - add bam index ouput - add cram output option - make metrics output: more general * fix input and output formats * update input file description * drop sam output, goes against nf-core regs; add input check for cram files * fix typo * Update modules/biobambam/bamsormadup/main.nf Co-authored-by: James A. Fellows Yates * improve ref fasta name * fix if else shorthand * fix syntax error * kind of fix tests * set fixed suffix for metrics file to keep it in line with picard and bammarkduplicates2 * fix command line * update test.yml * add support for multiple input bams * Update modules/biobambam/bamsormadup/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/biobambam/bamsormadup/meta.yml Co-authored-by: James A. Fellows Yates * Update tests/modules/biobambam/bamsormadup/test.yml Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates
---
 modules/biobambam/bamsormadup/main.nf        | 46 ++++++++++++++++
 modules/biobambam/bamsormadup/meta.yml       | 52 +++++++++++++++++++
 tests/config/pytest_modules.yml              |  4 ++
 tests/modules/biobambam/bamsormadup/main.nf  | 15 ++++++
 .../biobambam/bamsormadup/nextflow.config    |  5 ++
 tests/modules/biobambam/bamsormadup/test.yml | 11 ++++
 6 files changed, 133 insertions(+)
 create mode 100644 modules/biobambam/bamsormadup/main.nf
 create mode 100644 modules/biobambam/bamsormadup/meta.yml
 create mode 100644 tests/modules/biobambam/bamsormadup/main.nf
 create mode 100644 tests/modules/biobambam/bamsormadup/nextflow.config
 create mode 100644 tests/modules/biobambam/bamsormadup/test.yml

diff --git a/modules/biobambam/bamsormadup/main.nf b/modules/biobambam/bamsormadup/main.nf
new file mode 100644
index 00000000..b9e28e43
--- /dev/null
+++ b/modules/biobambam/bamsormadup/main.nf
@@ -0,0 +1,46 @@
+process BIOBAMBAM_BAMSORMADUP {
+    tag "$meta.id"
+    label "process_medium"
+
+    conda (params.enable_conda ? "bioconda::biobambam=2.0.183" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/biobambam:2.0.183--h9f5acd7_1' : 'quay.io/biocontainers/biobambam:2.0.183--h9f5acd7_1'}"
+
+    input:
+    tuple val(meta), path(bams)
+    path(fasta)
+
+    output:
+    tuple val(meta), path("*.{bam,cram}") ,emit: bam
+    tuple val(meta), path("*.bam.bai") ,optional:true, emit: bam_index
+    tuple val(meta), path("*.metrics.txt") ,emit: metrics
+    path "versions.yml" ,emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+    def suffix = args.contains("outputformat=cram") ? "cram" : "bam"
+    def input_string = bams.join(" I=")
+
+    if (args.contains("outputformat=cram") && !fasta) error "Reference required for CRAM output."
+
+    """
+    bamcat \\
+        I=${input_string} \\
+        level=0 \\
+        | bamsormadup \\
+        $args \\
+        M=${prefix}.metrics.txt \\
+        tmpfile=$prefix \\
+        threads=$task.cpus \\
+        > ${prefix}.${suffix}
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        bamcat: \$(echo \$(bamsormadup --version 2>&1) | sed 's/^This is biobambam2 version //; s/..biobambam2 is .*\$//' )
+        bamsormadup: \$(echo \$(bamsormadup --version 2>&1) | sed 's/^This is biobambam2 version //; s/..biobambam2 is .*\$//' )
+    END_VERSIONS
+    """
+}
diff --git a/modules/biobambam/bamsormadup/meta.yml b/modules/biobambam/bamsormadup/meta.yml
new file mode 100644
index 00000000..39acf3b3
--- /dev/null
+++ b/modules/biobambam/bamsormadup/meta.yml
@@ -0,0 +1,52 @@
+name: biobambam_bamsormadup
+description: Parallel sorting and duplicate marking
+keywords:
+  - markduplicates
+  - sort
+  - bam
+  - cram
+tools:
+  - biobambam:
+      description: |
+        biobambam is a set of tools for early stage alignment file processing.
+      homepage: https://gitlab.com/german.tischler/biobambam2
+      documentation: https://gitlab.com/german.tischler/biobambam2/-/blob/master/README.md
+      doi: 10.1186/1751-0473-9-13
+      licence: ["GPL v3"]
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - bams:
+      type: file
+      description: List containing 1 or more bam files
+  - fasta:
+      type: file
+      description: Reference genome in FASTA format (optional)
+      pattern: "*.{fa,fasta}"
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g.
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM file with duplicate reads marked/removed + pattern: "*.{bam,cram}" + - bam_index: + type: file + description: BAM index file + pattern: "*.{bai}" + - metrics: + type: file + description: Duplicate metrics file generated by biobambam + pattern: "*.{metrics.txt}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 6d66f230..d47b95c4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -214,6 +214,10 @@ biobambam/bammarkduplicates2: - modules/biobambam/bammarkduplicates2/** - tests/modules/biobambam/bammarkduplicates2/** +biobambam/bamsormadup: + - modules/biobambam/bamsormadup/** + - tests/modules/biobambam/bamsormadup/** + biscuit/align: - modules/biscuit/index/** - modules/biscuit/align/** diff --git a/tests/modules/biobambam/bamsormadup/main.nf b/tests/modules/biobambam/bamsormadup/main.nf new file mode 100644 index 00000000..741a4433 --- /dev/null +++ b/tests/modules/biobambam/bamsormadup/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BIOBAMBAM_BAMSORMADUP } from '../../../../modules/biobambam/bamsormadup/main.nf' + +workflow test_biobambam_bamsormadup { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)], + ] + + BIOBAMBAM_BAMSORMADUP ( input, [] ) +} diff --git a/tests/modules/biobambam/bamsormadup/nextflow.config b/tests/modules/biobambam/bamsormadup/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/biobambam/bamsormadup/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/biobambam/bamsormadup/test.yml b/tests/modules/biobambam/bamsormadup/test.yml new file mode 100644 index 00000000..a7a14202 --- /dev/null +++ b/tests/modules/biobambam/bamsormadup/test.yml @@ -0,0 +1,11 @@ +- name: biobambam bamsormadup test_biobambam_bamsormadup + command: nextflow run tests/modules/biobambam/bamsormadup -entry test_biobambam_bamsormadup -c tests/config/nextflow.config + tags: + - biobambam/bamsormadup + - biobambam + files: + - path: output/biobambam/test.bam + md5sum: 243a77fb0642fd46bb16a4d3432d19dc + - path: output/biobambam/test.metrics.txt + md5sum: 1721879bea1f3888ecd33b35e6ee0e72 + - path: output/biobambam/versions.yml From d2726fcf75063960f06b36d2229a4c0966614108 Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Thu, 7 Apr 2022 11:46:34 +0200 Subject: [PATCH 099/283] Update centrifuge/centrifuge (#1495) --- modules/centrifuge/{ => centrifuge}/main.nf | 10 ++++------ modules/centrifuge/{ => centrifuge}/meta.yml | 14 ++------------ tests/config/pytest_modules.yml | 6 +++--- tests/modules/centrifuge/{ => centrifuge}/main.nf | 15 +++++++-------- .../centrifuge/{ => centrifuge}/nextflow.config | 0 .../modules/centrifuge/{ => centrifuge}/test.yml | 12 ++++++------ 6 files changed, 22 insertions(+), 35 deletions(-) rename modules/centrifuge/{ => centrifuge}/main.nf (88%) rename modules/centrifuge/{ => centrifuge}/meta.yml (82%) rename tests/modules/centrifuge/{ => 
centrifuge}/main.nf (66%) rename tests/modules/centrifuge/{ => centrifuge}/nextflow.config (100%) rename tests/modules/centrifuge/{ => centrifuge}/test.yml (51%) diff --git a/modules/centrifuge/main.nf b/modules/centrifuge/centrifuge/main.nf similarity index 88% rename from modules/centrifuge/main.nf rename to modules/centrifuge/centrifuge/main.nf index c9ec377b..3d23fc96 100644 --- a/modules/centrifuge/main.nf +++ b/modules/centrifuge/centrifuge/main.nf @@ -1,4 +1,4 @@ -process CENTRIFUGE { +process CENTRIFUGE_CENTRIFUGE { tag "$meta.id" label 'process_high' @@ -10,7 +10,6 @@ process CENTRIFUGE { input: tuple val(meta), path(reads) path db - val db_name val save_unaligned val save_aligned val sam_format @@ -18,7 +17,6 @@ process CENTRIFUGE { output: tuple val(meta), path('*report.txt') , emit: report tuple val(meta), path('*results.txt') , emit: results - tuple val(meta), path('*kreport.txt') , emit: kreport tuple val(meta), path('*.sam') , optional: true, emit: sam tuple val(meta), path('*.mapped.fastq{,.1,.2}.gz') , optional: true, emit: fastq_mapped tuple val(meta), path('*.unmapped.fastq{,.1,.2}.gz') , optional: true, emit: fastq_unmapped @@ -31,7 +29,6 @@ process CENTRIFUGE { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def paired = meta.single_end ? "-U ${reads}" : "-1 ${reads[0]} -2 ${reads[1]}" - def db_name = db.toString().replace(".tar.gz","") def unaligned = '' def aligned = '' if (meta.single_end) { @@ -43,8 +40,10 @@ process CENTRIFUGE { } def sam_output = sam_format ? "--out-fmt 'sam'" : '' """ + ## we add "-no-name ._" to ensure silly Mac OSX metafiles files aren't included + db_name=`find -L ${db} -name "*.1.cf" -not -name "._*" | sed 's/.1.cf//'` centrifuge \\ - -x ${db}/${db_name} \\ + -x \$db_name \\ -p $task.cpus \\ $paired \\ --report-file ${prefix}.report.txt \\ @@ -53,7 +52,6 @@ process CENTRIFUGE { $aligned \\ $sam_output \\ $args - centrifuge-kreport -x $db_name ${prefix}.results.txt > ${prefix}.kreport.txt cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/centrifuge/meta.yml b/modules/centrifuge/centrifuge/meta.yml similarity index 82% rename from modules/centrifuge/meta.yml rename to modules/centrifuge/centrifuge/meta.yml index aabb465f..a252c00c 100644 --- a/modules/centrifuge/meta.yml +++ b/modules/centrifuge/centrifuge/meta.yml @@ -1,4 +1,4 @@ -name: centrifuge +name: centrifuge_centrifuge description: Classifies metagenomic sequence data keywords: - classify @@ -25,11 +25,7 @@ input: respectively. - db: type: directory - description: Centrifuge database in .tar.gz format - pattern: "*.tar.gz" - - db_name: - type: string - description: Centrifuge database filenames without the suffix ".cf" + description: Path to directory containing centrifuge database files - save_unaligned: type: value description: If true unmapped fastq files are saved @@ -52,12 +48,6 @@ output: description: | File containing classification results pattern: "*.{results.txt}" - - kreport: - type: file - description: | - File containing kraken-style report from centrifuge - out files. 
- pattern: "*.{kreport.txt}" - fastq_unmapped: type: file description: Unmapped fastq files diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index d47b95c4..64779036 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -395,9 +395,9 @@ cellranger/mkref: - modules/cellranger/gtf/** - tests/modules/cellranger/gtf/** -centrifuge: - - modules/centrifuge/** - - tests/modules/centrifuge/** +centrifuge/centrifuge: + - modules/centrifuge/centrifuge/** + - tests/modules/centrifuge/centrifuge/** checkm/lineagewf: - modules/checkm/lineagewf/** diff --git a/tests/modules/centrifuge/main.nf b/tests/modules/centrifuge/centrifuge/main.nf similarity index 66% rename from tests/modules/centrifuge/main.nf rename to tests/modules/centrifuge/centrifuge/main.nf index 37393ce5..7e44bd80 100644 --- a/tests/modules/centrifuge/main.nf +++ b/tests/modules/centrifuge/centrifuge/main.nf @@ -2,37 +2,36 @@ nextflow.enable.dsl = 2 -include { UNTAR } from '../../../modules/untar/main.nf' -include { CENTRIFUGE } from '../../../modules/centrifuge/main.nf' +include { UNTAR } from '../../../../modules/untar/main.nf' +include { CENTRIFUGE_CENTRIFUGE } from '../../../../modules/centrifuge/centrifuge/main.nf' -workflow test_centrifuge_single_end { +workflow test_centrifuge_centrifuge_single_end { input = [ [ id:'test', single_end:true ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] ] db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] - db_name = "minigut_cf" save_unaligned = true save_aligned = false sam_format = false UNTAR ( db ) - CENTRIFUGE ( input, UNTAR.out.untar.map{ it[1] },db_name, save_unaligned, save_aligned, sam_format ) + CENTRIFUGE_CENTRIFUGE ( input, UNTAR.out.untar.map{ it[1] }, save_unaligned, save_aligned, sam_format ) } -workflow test_centrifuge_paired_end { +workflow test_centrifuge_centrifuge_paired_end { input = [ [ id:'test', single_end:false ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] - db_name = "minigut_cf" + //db_name = "minigut_cf" save_unaligned = true save_aligned = false sam_format = false UNTAR ( db ) - CENTRIFUGE ( input, UNTAR.out.untar.map{ it[1] }, db_name, save_unaligned, save_aligned, sam_format ) + CENTRIFUGE_CENTRIFUGE ( input, UNTAR.out.untar.map{ it[1] }, save_unaligned, save_aligned, sam_format ) } diff --git a/tests/modules/centrifuge/nextflow.config b/tests/modules/centrifuge/centrifuge/nextflow.config similarity index 100% rename from tests/modules/centrifuge/nextflow.config rename to tests/modules/centrifuge/centrifuge/nextflow.config diff --git a/tests/modules/centrifuge/test.yml b/tests/modules/centrifuge/centrifuge/test.yml similarity index 51% rename from tests/modules/centrifuge/test.yml rename to tests/modules/centrifuge/centrifuge/test.yml index a7b4360b..641ca7ef 100644 --- a/tests/modules/centrifuge/test.yml +++ b/tests/modules/centrifuge/centrifuge/test.yml @@ -1,20 +1,20 @@ -- name: centrifuge test_centrifuge_single_end - command: nextflow run tests/modules/centrifuge -entry test_centrifuge_single_end -c tests/config/nextflow.config +- name: centrifuge centrifuge 
test_centrifuge_centrifuge_single_end + command: nextflow run tests/modules/centrifuge/centrifuge -entry test_centrifuge_centrifuge_single_end -c tests/config/nextflow.config tags: - centrifuge + - centrifuge/centrifuge files: - - path: output/centrifuge/test.kreport.txt - path: output/centrifuge/test.report.txt - path: output/centrifuge/test.results.txt - path: output/centrifuge/test.unmapped.fastq.gz - path: output/centrifuge/versions.yml -- name: centrifuge test_centrifuge_paired_end - command: nextflow run tests/modules/centrifuge -entry test_centrifuge_paired_end -c tests/config/nextflow.config +- name: centrifuge centrifuge test_centrifuge_centrifuge_paired_end + command: nextflow run tests/modules/centrifuge/centrifuge -entry test_centrifuge_centrifuge_paired_end -c tests/config/nextflow.config tags: - centrifuge + - centrifuge/centrifuge files: - - path: output/centrifuge/test.kreport.txt - path: output/centrifuge/test.report.txt - path: output/centrifuge/test.results.txt - path: output/centrifuge/test.unmapped.fastq.1.gz From f07936741656de27060de4a72b1f5292e25d4f98 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Thu, 7 Apr 2022 13:50:58 +0200 Subject: [PATCH 100/283] Picard liftover vcf (#1431) * Building Picard liftovervcf module * Building Picard liftovervcf module_test * Building Picard liftovervcf pytest * Module for picard liftover vcf created * Fixed files after linting test * Fixed trailing whitespace * Checked files with prettier * further formatting with prettier * Fixed test.yml * Fixed input variable names * Changed contain test.liftef.vcf * Changed contain in test.yml test.liftef.vcf * Run prittier * Going back to previous version of test.yml * downgrading picard to 2.26.10 from 2.26.11 * Update modules/picard/liftovervcf/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/picard/liftovervcf/main.nf Print available memory Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Output from .vcf to .vcf.gz * Added spaces to align emit * Update modules/picard/liftovervcf/meta.yml Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/picard/liftovervcf/meta.yml Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/picard/liftovervcf/meta.yml Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Removing md5sum test Co-authored-by: jemten Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: Maxime U. Garcia --- modules/picard/liftovervcf/main.nf | 49 +++++++++++++++++ modules/picard/liftovervcf/meta.yml | 55 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/picard/liftovervcf/main.nf | 17 ++++++ .../picard/liftovervcf/nextflow.config | 5 ++ tests/modules/picard/liftovervcf/test.yml | 11 ++++ 6 files changed, 141 insertions(+) create mode 100644 modules/picard/liftovervcf/main.nf create mode 100644 modules/picard/liftovervcf/meta.yml create mode 100644 tests/modules/picard/liftovervcf/main.nf create mode 100644 tests/modules/picard/liftovervcf/nextflow.config create mode 100644 tests/modules/picard/liftovervcf/test.yml diff --git a/modules/picard/liftovervcf/main.nf b/modules/picard/liftovervcf/main.nf new file mode 100644 index 00000000..cdbd637e --- /dev/null +++ b/modules/picard/liftovervcf/main.nf @@ -0,0 +1,49 @@ +process PICARD_LIFTOVERVCF { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? 
"bioconda::picard=2.26.10" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + + input: + tuple val(meta), path(input_vcf) + path dict + path chain + path fasta + + output: + tuple val(meta), path("*lifted.vcf.gz") , emit: vcf_lifted + tuple val(meta), path("*unlifted.vcf.gz"), emit: vcf_unlifted + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 1 + if (!task.memory) { + log.info '[Picard LiftoverVcf] Available memory not known - defaulting to 1GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + picard \\ + -Xmx${avail_mem}g \\ + LiftoverVcf \\ + $args \\ + I=$input_vcf \\ + O=${prefix}.lifted.vcf.gz \\ + CHAIN=$chain \\ + REJECT=${prefix}.unlifted.vcf.gz \\ + R=$fasta + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$(picard LiftoverVcf --version 2>&1 | grep -o 'Version.*' | cut -f2- -d:) + END_VERSIONS + """ +} diff --git a/modules/picard/liftovervcf/meta.yml b/modules/picard/liftovervcf/meta.yml new file mode 100644 index 00000000..55f04963 --- /dev/null +++ b/modules/picard/liftovervcf/meta.yml @@ -0,0 +1,55 @@ +name: picard_liftovervcf +description: convert between genome builds +keywords: + - liftOver + - picard +tools: + - picard: + description: Move annotations from one assembly to another + homepage: https://gatk.broadinstitute.org/hc/en-us/articles/360037060932-LiftoverVcf-Picard + documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037060932-LiftoverVcf-Picard + tool_dev_url: https://github.com/broadinstitute/picard + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input_vcf: + type: file + description: VCF file + pattern: "*.{vcf,vcf.gz}" + - chain: + type: file + description: The liftover chain file + - fasta: + type: file + description: fasta file + pattern: "*.fasta" + - dict: + type: file + description: dictionary for fasta file + pattern: "*.{dict}" + +output: + - meta: + type: map + description: Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - vcf_lifted: + type: file + description: VCF file containing successfully lifted variants + pattern: "*.{lifted.vcf.gz}" + - vcf_unlifted: + type: file + description: VCF file containing unsuccessfully lifted variants + pattern: "*.{unlifted.vcf.gz}" + +authors: + - "@lucpen" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 64779036..364d1f53 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1351,6 +1351,10 @@ picard/fixmateinformation: - modules/picard/fixmateinformation/** - tests/modules/picard/fixmateinformation/** +picard/liftovervcf: + - modules/picard/liftovervcf/** + - tests/modules/picard/liftovervcf/** + picard/markduplicates: - modules/picard/markduplicates/** - tests/modules/picard/markduplicates/** diff --git a/tests/modules/picard/liftovervcf/main.nf b/tests/modules/picard/liftovervcf/main.nf new file mode 100644 index 00000000..8aee8273 --- /dev/null +++ b/tests/modules/picard/liftovervcf/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_LIFTOVERVCF } from '../../../../modules/picard/liftovervcf/main.nf' + +workflow test_picard_liftovervcf { + + input_vcf = [ [ id:'test' ], + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true) + ] + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + chain = file(params.test_data['homo_sapiens']['genome']['genome_chain_gz'], checkIfExists: true) + fasta = [ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) ] + + PICARD_LIFTOVERVCF ( input_vcf, dict, chain, fasta ) +} diff --git a/tests/modules/picard/liftovervcf/nextflow.config b/tests/modules/picard/liftovervcf/nextflow.config new file mode 100644 index 00000000..e1581bb9 --- /dev/null +++ b/tests/modules/picard/liftovervcf/nextflow.config @@ -0,0 +1,5 @@ +process { + ext.args = "WARN_ON_MISSING_CONTIG=true" + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/picard/liftovervcf/test.yml b/tests/modules/picard/liftovervcf/test.yml new file mode 100644 index 00000000..b1b30d5d --- /dev/null +++ b/tests/modules/picard/liftovervcf/test.yml @@ -0,0 +1,11 @@ +- name: picard liftovervcf test_picard_liftovervcf + command: nextflow run tests/modules/picard/liftovervcf -entry test_picard_liftovervcf -c tests/config/nextflow.config + tags: + - picard/liftovervcf + - picard + files: + - path: output/picard/test.lifted.vcf.gz + contains: + - "chr22" + - path: output/picard/test.unlifted.vcf.gz + - path: output/picard/versions.yml From 9ae34a01d1747019fd37753ff4cafb05aec35a2b Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Fri, 8 Apr 2022 11:43:40 +0200 Subject: [PATCH 101/283] Fix Controlfreec: Add stub runs to test single sample input & make conda work with R scripts (#1504) * Fix typo * Add stub runs for testing input without matched normals * Add missing -stub-run * remove empty file checksum tests and change workflow names * test controlfreec naming * fix output file names * fix output file names * fix output file names * fix conda and container path difference for R scripts * update tar version to work with conda * fix version number in docker * try to fix path to script, pretty sure it won't work * try new ways to set path with wildcard * try 
which * add which but with escape * remove comment --- .../controlfreec/assesssignificance/main.nf | 13 ++++++- modules/controlfreec/freec/main.nf | 20 ++++++++++- modules/controlfreec/freec2bed/main.nf | 11 ++++++ modules/controlfreec/freec2circos/main.nf | 11 ++++++ modules/controlfreec/makegraph/main.nf | 14 +++++++- modules/untar/main.nf | 6 ++-- .../controlfreec/assesssignificance/main.nf | 35 ++++++++++++++++++ .../controlfreec/assesssignificance/test.yml | 10 +++++- tests/modules/controlfreec/freec/main.nf | 33 +++++++++++++++++ tests/modules/controlfreec/freec/test.yml | 16 ++++++++- tests/modules/controlfreec/freec2bed/main.nf | 36 ++++++++++++++++++- tests/modules/controlfreec/freec2bed/test.yml | 8 +++++ .../modules/controlfreec/freec2circos/main.nf | 34 ++++++++++++++++++ .../controlfreec/freec2circos/test.yml | 8 +++++ tests/modules/controlfreec/makegraph/main.nf | 35 ++++++++++++++++++ tests/modules/controlfreec/makegraph/test.yml | 10 ++++++ 16 files changed, 291 insertions(+), 9 deletions(-) diff --git a/modules/controlfreec/assesssignificance/main.nf b/modules/controlfreec/assesssignificance/main.nf index f85a3c7f..4bdb00b3 100644 --- a/modules/controlfreec/assesssignificance/main.nf +++ b/modules/controlfreec/assesssignificance/main.nf @@ -21,7 +21,7 @@ process CONTROLFREEC_ASSESSSIGNIFICANCE { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ - cat /usr/local/bin/assess_significance.R | R --slave --args ${cnvs} ${ratio} + cat \$(which assess_significance.R) | R --slave --args ${cnvs} ${ratio} mv *.p.value.txt ${prefix}.p.value.txt @@ -30,4 +30,15 @@ process CONTROLFREEC_ASSESSSIGNIFICANCE { controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.p.value.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ } diff --git a/modules/controlfreec/freec/main.nf b/modules/controlfreec/freec/main.nf index eb66eeaa..857ffdee 100644 --- a/modules/controlfreec/freec/main.nf +++ b/modules/controlfreec/freec/main.nf @@ -21,7 +21,7 @@ process CONTROLFREEC_FREEC { output: tuple val(meta), path("*_ratio.BedGraph") , emit: bedgraph, optional: true - tuple val(meta), path("*_control.cpn") , emit: control_cpn + tuple val(meta), path("*_control.cpn") , emit: control_cpn, optional: true tuple val(meta), path("*_sample.cpn") , emit: sample_cpn tuple val(meta), path("GC_profile.*.cpn") , emit: gcprofile_cpn, optional:true tuple val(meta), path("*_BAF.txt") , emit: BAF @@ -155,4 +155,22 @@ process CONTROLFREEC_FREEC { controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_ratio.BedGraph + touch ${prefix}_sample.cpn + touch GC_profile.${prefix}.cpn + touch ${prefix}_BAF.txt + touch ${prefix}_CNVs + touch ${prefix}_info.txt + touch ${prefix}_ratio.txt + touch config.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ } diff --git a/modules/controlfreec/freec2bed/main.nf b/modules/controlfreec/freec2bed/main.nf index 880e4716..aefc200e 100644 
--- a/modules/controlfreec/freec2bed/main.nf +++ b/modules/controlfreec/freec2bed/main.nf @@ -28,4 +28,15 @@ process CONTROLFREEC_FREEC2BED { controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bed + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ } diff --git a/modules/controlfreec/freec2circos/main.nf b/modules/controlfreec/freec2circos/main.nf index 8879d4c0..8f9be300 100644 --- a/modules/controlfreec/freec2circos/main.nf +++ b/modules/controlfreec/freec2circos/main.nf @@ -28,4 +28,15 @@ process CONTROLFREEC_FREEC2CIRCOS { controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.circos.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ } diff --git a/modules/controlfreec/makegraph/main.nf b/modules/controlfreec/makegraph/main.nf index 9a0c7281..a8954d72 100644 --- a/modules/controlfreec/makegraph/main.nf +++ b/modules/controlfreec/makegraph/main.nf @@ -25,12 +25,24 @@ process CONTROLFREEC_MAKEGRAPH { def prefix = task.ext.prefix ?: "${meta.id}" def baf = baf ?: "" """ - cat /usr/local/bin/makeGraph.R | R --slave --args ${args} ${ratio} ${baf} + cat \$(which makeGraph.R) | R --slave --args ${args} ${ratio} ${baf} mv *_BAF.txt.png ${prefix}_BAF.png mv *_ratio.txt.log2.png ${prefix}_ratio.log2.png mv *_ratio.txt.png ${prefix}_ratio.png + cat <<-END_VERSIONS > versions.yml + "${task.process}": + controlfreec: \$(echo \$(freec -version 2>&1) | sed 's/^.*Control-FREEC //; s/:.*\$//' | sed -e "s/Control-FREEC v//g" ) + END_VERSIONS + """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}_BAF.png + touch ${prefix}_ratio.log2.png + touch ${prefix}_ratio.png cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/untar/main.nf b/modules/untar/main.nf index 5aa6aa7f..bbfa0bfe 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -2,10 +2,10 @@ process UNTAR { tag "$archive" label 'process_low' - conda (params.enable_conda ? "conda-forge::tar=1.32" : null) + conda (params.enable_conda ? "conda-forge::tar=1.34" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : - 'biocontainers/biocontainers:v1.2.0_cv1' }" + 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv2/biocontainers_v1.2.0_cv2.img' : + 'biocontainers/biocontainers:v1.2.0_cv2' }" input: tuple val(meta), path(archive) diff --git a/tests/modules/controlfreec/assesssignificance/main.nf b/tests/modules/controlfreec/assesssignificance/main.nf index f8d8aa1d..e5ed1bf7 100644 --- a/tests/modules/controlfreec/assesssignificance/main.nf +++ b/tests/modules/controlfreec/assesssignificance/main.nf @@ -40,3 +40,38 @@ workflow test_controlfreec_assesssignificance { sig_in = CONTROLFREEC_FREEC.out.CNV.join(CONTROLFREEC_FREEC.out.ratio) CONTROLFREEC_ASSESSSIGNIFICANCE ( sig_in ) } + +workflow test_controlfreec_assesssignificance_single { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + [], + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + sig_in = CONTROLFREEC_FREEC.out.CNV.join(CONTROLFREEC_FREEC.out.ratio) + CONTROLFREEC_ASSESSSIGNIFICANCE ( sig_in ) +} diff --git a/tests/modules/controlfreec/assesssignificance/test.yml b/tests/modules/controlfreec/assesssignificance/test.yml index f8393330..19e54acf 100644 --- a/tests/modules/controlfreec/assesssignificance/test.yml +++ b/tests/modules/controlfreec/assesssignificance/test.yml @@ -7,4 +7,12 @@ - path: output/controlfreec/test.p.value.txt md5sum: 44e23b916535fbc1a3f47b57fad292df - path: output/controlfreec/versions.yml - md5sum: 0aa42fed10d61e4570fe1e0e83ffe932 + +- name: controlfreec assesssignificance test_controlfreec_assesssignificance_single + command: nextflow run tests/modules/controlfreec/assesssignificance -entry test_controlfreec_assesssignificance_single -c tests/config/nextflow.config -stub-run + tags: + - controlfreec/assesssignificance + - controlfreec + files: + - path: output/controlfreec/test.p.value.txt + - path: output/controlfreec/versions.yml diff --git a/tests/modules/controlfreec/freec/main.nf b/tests/modules/controlfreec/freec/main.nf index d14c8f65..1f4a069b 100644 --- a/tests/modules/controlfreec/freec/main.nf +++ b/tests/modules/controlfreec/freec/main.nf @@ -36,3 +36,36 @@ workflow test_controlfreec_freec { [] ) } + +workflow test_controlfreec_freec_single { + + input = [ + [ id:'test2', single_end:false, sex:'XX' ], // meta map + [], + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = 
file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) +} + diff --git a/tests/modules/controlfreec/freec/test.yml b/tests/modules/controlfreec/freec/test.yml index d50fc063..1bd4e3a4 100644 --- a/tests/modules/controlfreec/freec/test.yml +++ b/tests/modules/controlfreec/freec/test.yml @@ -20,4 +20,18 @@ - path: output/controlfreec/test2.mpileup.gz_sample.cpn md5sum: c80dad58a77b1d7ba6d273999f4b4b4b - path: output/controlfreec/versions.yml - md5sum: 3ab250a2ab3be22628124c7c65324651 + +- name: controlfreec test_controlfreec_freec_single + command: nextflow run tests/modules/controlfreec/freec -entry test_controlfreec_freec_single -c tests/config/nextflow.config -stub-run + tags: + - controlfreec + - controlfreec/freec + files: + - path: output/controlfreec/config.txt + - path: output/controlfreec/test2_BAF.txt + - path: output/controlfreec/test2_CNVs + - path: output/controlfreec/test2_info.txt + - path: output/controlfreec/test2_ratio.BedGraph + - path: output/controlfreec/test2_ratio.txt + - path: output/controlfreec/test2_sample.cpn + - path: output/controlfreec/versions.yml diff --git a/tests/modules/controlfreec/freec2bed/main.nf b/tests/modules/controlfreec/freec2bed/main.nf index df121832..c1b0f04e 100644 --- a/tests/modules/controlfreec/freec2bed/main.nf +++ b/tests/modules/controlfreec/freec2bed/main.nf @@ -8,7 +8,7 @@ include { UNTAR } from '../../../../modules/untar/main.nf' workflow test_controlfreec_freec2bed { - input = [ + input = [ [ id:'test', single_end:false, sex:'XX' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_mpileup'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), @@ -39,3 +39,37 @@ workflow test_controlfreec_freec2bed { CONTROLFREEC_FREEC2BED ( CONTROLFREEC_FREEC.out.ratio ) } + +workflow test_controlfreec_freec2bed_single { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + [], + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + 
CONTROLFREEC_FREEC2BED ( CONTROLFREEC_FREEC.out.ratio ) +} diff --git a/tests/modules/controlfreec/freec2bed/test.yml b/tests/modules/controlfreec/freec2bed/test.yml index 0198bac6..9abb3a54 100644 --- a/tests/modules/controlfreec/freec2bed/test.yml +++ b/tests/modules/controlfreec/freec2bed/test.yml @@ -6,3 +6,11 @@ files: - path: output/controlfreec/test.bed md5sum: abe10b7ce94ba903503e697394c17297 + +- name: controlfreec freec2bed test_controlfreec_freec2bed_single + command: nextflow run tests/modules/controlfreec/freec2bed -entry test_controlfreec_freec2bed_single -c tests/config/nextflow.config -stub-run + tags: + - controlfreec/freec2bed + - controlfreec + files: + - path: output/controlfreec/test.bed diff --git a/tests/modules/controlfreec/freec2circos/main.nf b/tests/modules/controlfreec/freec2circos/main.nf index 9b655f0e..6b34edb6 100644 --- a/tests/modules/controlfreec/freec2circos/main.nf +++ b/tests/modules/controlfreec/freec2circos/main.nf @@ -39,3 +39,37 @@ workflow test_controlfreec_freec2circos { CONTROLFREEC_FREEC2CIRCOS ( CONTROLFREEC_FREEC.out.ratio ) } + +workflow test_controlfreec_freec2circos_single { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + [], + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + [],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + CONTROLFREEC_FREEC2CIRCOS ( CONTROLFREEC_FREEC.out.ratio ) +} diff --git a/tests/modules/controlfreec/freec2circos/test.yml b/tests/modules/controlfreec/freec2circos/test.yml index 5758a828..c29111de 100644 --- a/tests/modules/controlfreec/freec2circos/test.yml +++ b/tests/modules/controlfreec/freec2circos/test.yml @@ -6,3 +6,11 @@ files: - path: output/controlfreec/test.circos.txt md5sum: 19cf35f2c36b46f717dc8342b8a5a645 + +- name: controlfreec freec2circos test_controlfreec_freec2circos_single + command: nextflow run tests/modules/controlfreec/freec2circos -entry test_controlfreec_freec2circos_single -c tests/config/nextflow.config -stub-run + tags: + - controlfreec + - controlfreec/freec2circos + files: + - path: output/controlfreec/test.circos.txt diff --git a/tests/modules/controlfreec/makegraph/main.nf b/tests/modules/controlfreec/makegraph/main.nf index ffea3d99..543216e1 100644 --- a/tests/modules/controlfreec/makegraph/main.nf +++ b/tests/modules/controlfreec/makegraph/main.nf @@ -40,3 +40,38 @@ workflow test_controlfreec_makegraph { makegraph_in = CONTROLFREEC_FREEC.out.ratio.join(CONTROLFREEC_FREEC.out.BAF) CONTROLFREEC_MAKEGRAPH ( makegraph_in ) } + +workflow test_controlfreec_makegraph_single { + + input = [ + [ id:'test', single_end:false, sex:'XX' ], // meta map + [], + file(params.test_data['homo_sapiens']['illumina']['test2_mpileup'], checkIfExists: true), + 
[],[],[],[] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + + chrfiles = [ [], file(params.test_data['homo_sapiens']['genome']['genome_21_chromosomes_dir'], checkIfExists: true) ] + target_bed = file(params.test_data['homo_sapiens']['genome']['genome_21_multi_interval_bed'], checkIfExists: true) + + UNTAR(chrfiles) + CONTROLFREEC_FREEC (input, + fasta, + fai, + [], + dbsnp, + dbsnp_tbi, + UNTAR.out.untar.map{ it[1] }, + [], + target_bed, + [] + ) + + makegraph_in = CONTROLFREEC_FREEC.out.ratio.join(CONTROLFREEC_FREEC.out.BAF) + CONTROLFREEC_MAKEGRAPH ( makegraph_in ) +} diff --git a/tests/modules/controlfreec/makegraph/test.yml b/tests/modules/controlfreec/makegraph/test.yml index 21e78766..02d1a165 100644 --- a/tests/modules/controlfreec/makegraph/test.yml +++ b/tests/modules/controlfreec/makegraph/test.yml @@ -10,3 +10,13 @@ md5sum: b3c7916b1b4951a0cc3da20d8e9e0262 - path: output/controlfreec/test_ratio.png md5sum: 1435b29536b3b1555b4c423f8f4fb000 + +- name: controlfreec makegraph test_controlfreec_makegraph_single + command: nextflow run tests/modules/controlfreec/makegraph -entry test_controlfreec_makegraph_single -c tests/config/nextflow.config -stub-run + tags: + - controlfreec + - controlfreec/makegraph + files: + - path: output/controlfreec/test_BAF.png + - path: output/controlfreec/test_ratio.log2.png + - path: output/controlfreec/test_ratio.png From f57f085912a2b158eb224c21aeef45722a797aa6 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 8 Apr 2022 14:41:08 +0200 Subject: [PATCH 102/283] new tool snap-aligner/index (#1506) * add snapaligner/index * output fixes * fix outputs * fix tests * update inputs * fix more bugs * fix linting * Update modules/snapaligner/index/main.nf Co-authored-by: James A. Fellows Yates * Update modules/snapaligner/index/main.nf Co-authored-by: James A. Fellows Yates * fix comments * fix indents * fix escaping Co-authored-by: James A. Fellows Yates --- modules/snapaligner/index/main.nf | 59 +++++++++++++++++++ modules/snapaligner/index/meta.yml | 39 ++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/snapaligner/index/main.nf | 9 +++ .../modules/snapaligner/index/nextflow.config | 5 ++ tests/modules/snapaligner/index/test.yml | 13 ++++ 6 files changed, 129 insertions(+) create mode 100644 modules/snapaligner/index/main.nf create mode 100644 modules/snapaligner/index/meta.yml create mode 100644 tests/modules/snapaligner/index/main.nf create mode 100644 tests/modules/snapaligner/index/nextflow.config create mode 100644 tests/modules/snapaligner/index/test.yml diff --git a/modules/snapaligner/index/main.nf b/modules/snapaligner/index/main.nf new file mode 100644 index 00000000..6dc2c958 --- /dev/null +++ b/modules/snapaligner/index/main.nf @@ -0,0 +1,59 @@ +process SNAPALIGNER_INDEX { + tag '$fasta' + label 'process_high' + + conda (params.enable_conda ? "bioconda::snap-aligner=2.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/snap-aligner:2.0.1--hd03093a_1': + 'quay.io/biocontainers/snap-aligner:2.0.1--hd03093a_1' }" + + input: + path fasta + path altcontigfile + path nonaltcontigfile + path altliftoverfile + + output: + path "snap/*" ,emit: index + path "versions.yml" ,emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def altcontigfile_arg = altcontigfile ? '-altContigFile ' + altcontigfile : '' + def nonaltcontigfile_arg = nonaltcontigfile ? '-nonAltContigFile ' + nonaltcontigfile : '' + def altliftoverfile_arg = altliftoverfile ? '-altLiftoverFile ' + altliftoverfile : '' + """ + mkdir snap + + snap-aligner \\ + index \\ + $fasta \\ + snap \\ + -t${task.cpus} \\ + $altcontigfile_arg \\ + $nonaltcontigfile_arg \\ + $altliftoverfile_arg \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snapaligner: \$(snap-aligner 2>&1| head -n 1 | sed 's/^.*version //') + END_VERSIONS + """ + stub: + """ + mkdir snap + echo "Genome" > snap/Genome + echo "GenomeIndex" > snap/GenomeIndex + echo "GenomeIndexHash" > snap/GenomeIndexHash + echo "OverflowTable" > snap/OverflowTable + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snapaligner: \$(snap-aligner 2>&1| head -n 1 | sed 's/^.*version //;s/\.\$//') + END_VERSIONS + """ +} diff --git a/modules/snapaligner/index/meta.yml b/modules/snapaligner/index/meta.yml new file mode 100644 index 00000000..6d5e0f19 --- /dev/null +++ b/modules/snapaligner/index/meta.yml @@ -0,0 +1,39 @@ +name: "snapaligner_index" +description: Create a SNAP index for reference genome +keywords: + - index + - fasta + - genome + - reference +tools: + - "snapaligner": + description: "Scalable Nucleotide Alignment Program -- a fast and accurate read aligner for high-throughput sequencing data" + homepage: "http://snap.cs.berkeley.edu" + documentation: "https://1drv.ms/b/s!AhuEg_0yZD86hcpblUt-muHKYsG8fA?e=R8ogug" + tool_dev_url: "https://github.com/amplab/snap" + doi: "10.1101/2021.11.23.469039" + licence: "['Apache v2']" +input: + - fasta: + type: file + description: Input genome fasta file + - altcontigfile: + type: file + description: Optional file with a list of alt contig names, one per line. + - nonaltcontigfile: + type: file + description: Optional file that contains a list of contigs (one per line) that will not be marked ALT regardless of size. + - altliftoverfile: + type: file + description: Optional file containing ALT-to-REF mappings (SAM format). e.g., hs38DH.fa.alt from bwa-kit. 
+output: + - index: + type: file + description: SNAP genome index files + pattern: "{Genome,GenomeIndex,GenomeIndexHash,OverflowTable}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 364d1f53..31f62c78 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1663,6 +1663,10 @@ sistr: - modules/sistr/** - tests/modules/sistr/** +snapaligner/index: + - modules/snapaligner/index/** + - tests/modules/snapaligner/index/** + snpdists: - modules/snpdists/** - tests/modules/snpdists/** diff --git a/tests/modules/snapaligner/index/main.nf b/tests/modules/snapaligner/index/main.nf new file mode 100644 index 00000000..4cebb876 --- /dev/null +++ b/tests/modules/snapaligner/index/main.nf @@ -0,0 +1,9 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SNAPALIGNER_INDEX } from '../../../../modules/snapaligner/index/main.nf' + +workflow test_snapaligner_index { + SNAPALIGNER_INDEX ( file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true),[],[],[]) +} diff --git a/tests/modules/snapaligner/index/nextflow.config b/tests/modules/snapaligner/index/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/snapaligner/index/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/snapaligner/index/test.yml b/tests/modules/snapaligner/index/test.yml new file mode 100644 index 00000000..2c4b4935 --- /dev/null +++ b/tests/modules/snapaligner/index/test.yml @@ -0,0 +1,13 @@ +- name: snapaligner index test_snapaligner_index + command: nextflow run tests/modules/snapaligner/index -entry test_snapaligner_index -c tests/config/nextflow.config + tags: + - snapaligner/index + - snapaligner + files: + - path: output/snapaligner/snap/Genome + md5sum: 7e189c954142ba37460332b467e34ed4 + - path: output/snapaligner/snap/GenomeIndex + md5sum: 298da8bcb1134f7b24379a792a7a46f8 + - path: output/snapaligner/snap/GenomeIndexHash + - path: output/snapaligner/snap/OverflowTable + - path: output/snapaligner/versions.yml From e19a9a2474c6609875b49d8140a7264e21a1beee Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 8 Apr 2022 15:54:15 +0200 Subject: [PATCH 103/283] new tool: staden_io_lib (#1499) * new tool: staden_io_lib * update docker containers * add test.yml * add fai index input * typo * fix version.yml * update md5sum * omit md5sum for cram * move scramble to submodule * add missing in/output * remove some comments Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/stadeniolib/scramble/main.nf | 61 +++++++++++++++++++ modules/stadeniolib/scramble/meta.yml | 58 ++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/stadeniolib/scramble/main.nf | 15 +++++ .../stadeniolib/scramble/nextflow.config | 5 ++ tests/modules/stadeniolib/scramble/test.yml | 7 +++ 6 files changed, 150 insertions(+) create mode 100644 modules/stadeniolib/scramble/main.nf create mode 100644 modules/stadeniolib/scramble/meta.yml create mode 100644 tests/modules/stadeniolib/scramble/main.nf create mode 100644 tests/modules/stadeniolib/scramble/nextflow.config create mode 100644 tests/modules/stadeniolib/scramble/test.yml diff --git 
a/modules/stadeniolib/scramble/main.nf b/modules/stadeniolib/scramble/main.nf
new file mode 100644
index 00000000..e24fb2cb
--- /dev/null
+++ b/modules/stadeniolib/scramble/main.nf
@@ -0,0 +1,61 @@
+process STADENIOLIB_SCRAMBLE {
+    tag "$meta.id"
+    label 'process_medium'
+
+    conda (params.enable_conda ? "bioconda::staden_io_lib=1.14.14" : null)
+    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
+        'https://depot.galaxyproject.org/singularity/staden_io_lib:1.14.14--h0d9da7e_3' :
+        'quay.io/biocontainers/staden_io_lib:1.14.14--h0d9da7e_3' }"
+
+    input:
+    tuple val(meta), path(reads)
+    path(fasta)
+    path(fai)
+    path(gzi)
+
+    output:
+    tuple val(meta), path("*.cram") ,emit: cram
+    path "*.gzi" ,emit: gzi, optional: true
+    path "versions.yml" ,emit: versions
+
+    when:
+    task.ext.when == null || task.ext.when
+
+    script:
+    def args = task.ext.args ?: ''
+    def prefix = task.ext.prefix ?: "${meta.id}"
+
+    def inputformat = reads.getExtension()
+    def outputformat = "cram"
+    if (args.contains("-O sam")) {
+        outputformat = "sam"
+    } else if (args.contains("-O bam")) {
+        outputformat = "bam"
+    }
+
+    def reference = fasta && fai ? "--r ${fasta}" : ""
+    if (outputformat == "cram" && !reference) {
+        error "Cannot convert to CRAM without a reference"
+    }
+
+    def gz_index = gzi ? "--g ${gzi}" : ""
+    if (outputformat == "cram" || outputformat == "sam") {
+        gz_index = ""
+        log.warn "Cannot use gzip index for CRAM or SAM output"
+    }
+
+    """
+    scramble \
+        $args \
+        -I ${inputformat} \
+        $reference \
+        -t $task.cpus \
+        ${reads} \
+        ${prefix}.${outputformat}
+
+    cat <<-END_VERSIONS > versions.yml
+    "${task.process}":
+        stadeniolib: \$(echo \$(scramble -h | head -n 1 |sed 's/^.*version //'))
+    END_VERSIONS
+    """
+}
diff --git a/modules/stadeniolib/scramble/meta.yml b/modules/stadeniolib/scramble/meta.yml
new file mode 100644
index 00000000..7e53a1b4
--- /dev/null
+++ b/modules/stadeniolib/scramble/meta.yml
@@ -0,0 +1,58 @@
+name: "stadeniolib_scramble"
+description: Advanced sequence file format conversions
+keywords:
+  - sam
+  - bam
+  - cram
+  - compression
+tools:
+  - "scramble":
+      description: "Staden Package 'io_lib' (sometimes referred to as libstaden-read by distributions). This contains code for reading and writing a variety of Bioinformatics / DNA Sequence formats."
+      homepage: "https://github.com/jkbonfield/io_lib"
+      documentation: "https://github.com/jkbonfield/io_lib/blob/master/README.md"
+      tool_dev_url: "https://github.com/jkbonfield/io_lib"
+      licence: "['BSD']"
+
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. [ id:'test', single_end:false ]
+  - reads:
+      type: file
+      description: BAM/CRAM/SAM file
+      pattern: "*.{bam,cram,sam}"
+  - fasta:
+      type: file
+      description: Reference genome in FASTA format
+      pattern: "*.{fa,fasta}"
+  - fai:
+      type: file
+      description: FASTA index file from samtools faidx
+      pattern: "*.{fai}"
+  - gzi:
+      type: file
+      description: Optional gzip index file for BAM inputs
+      pattern: "*.gzi"
+
+output:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g.
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - reads: + type: file + description: Converted reads + pattern: "*.{sam, bam, cram}" + - gzi: + type: Optional file + description: gzip index file for BAM outputs + pattern: ".{bam.gzi}" +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 31f62c78..2b99f835 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1707,6 +1707,10 @@ ssuissero: - modules/ssuissero/** - tests/modules/ssuissero/** +stadeniolib/scramble: + - modules/stadeniolib/scramble/** + - tests/modules/stadeniolib/scramble/** + staphopiasccmec: - modules/staphopiasccmec/** - tests/modules/staphopiasccmec/** diff --git a/tests/modules/stadeniolib/scramble/main.nf b/tests/modules/stadeniolib/scramble/main.nf new file mode 100644 index 00000000..d29c6dd8 --- /dev/null +++ b/tests/modules/stadeniolib/scramble/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { STADENIOLIB_SCRAMBLE } from '../../../../modules/stadeniolib/scramble/main.nf' + +workflow test_stadeniolib { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + STADENIOLIB_SCRAMBLE ( input, file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true), []) +} diff --git a/tests/modules/stadeniolib/scramble/nextflow.config b/tests/modules/stadeniolib/scramble/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/stadeniolib/scramble/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/stadeniolib/scramble/test.yml b/tests/modules/stadeniolib/scramble/test.yml new file mode 100644 index 00000000..cea6fb70 --- /dev/null +++ b/tests/modules/stadeniolib/scramble/test.yml @@ -0,0 +1,7 @@ +- name: stadeniolib test_stadeniolib + command: nextflow run tests/modules/stadeniolib -entry test_stadeniolib -c tests/config/nextflow.config + tags: + - stadeniolib + files: + - path: output/stadeniolib/test.cram + - path: output/stadeniolib/versions.yml From d4160c669b1f7faad3177a847c53516ac932b0c8 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 8 Apr 2022 16:02:10 +0200 Subject: [PATCH 104/283] Tool/crosscheckfingerprints (#1505) * first commit * first commit * update test.yml * update test.yml * Update modules/picard/crosscheckfingerprints/main.nf Co-authored-by: Jose Espinosa-Carrasco * Update modules/picard/crosscheckfingerprints/main.nf Co-authored-by: Jose Espinosa-Carrasco * add support for vcf haplotype maps * update test * update test data config, use test data * fix exit code * Update modules/picard/crosscheckfingerprints/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * Update modules/picard/crosscheckfingerprints/main.nf Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * remove unused stub Co-authored-by: Jose Espinosa-Carrasco Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/picard/crosscheckfingerprints/main.nf | 51 ++++++++++++++++++ 
.../picard/crosscheckfingerprints/meta.yml | 53 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 1 + .../picard/crosscheckfingerprints/main.nf | 14 +++++ .../crosscheckfingerprints/nextflow.config | 5 ++ .../picard/crosscheckfingerprints/test.yml | 8 +++ 7 files changed, 136 insertions(+) create mode 100644 modules/picard/crosscheckfingerprints/main.nf create mode 100644 modules/picard/crosscheckfingerprints/meta.yml create mode 100644 tests/modules/picard/crosscheckfingerprints/main.nf create mode 100644 tests/modules/picard/crosscheckfingerprints/nextflow.config create mode 100644 tests/modules/picard/crosscheckfingerprints/test.yml diff --git a/modules/picard/crosscheckfingerprints/main.nf b/modules/picard/crosscheckfingerprints/main.nf new file mode 100644 index 00000000..b3dface5 --- /dev/null +++ b/modules/picard/crosscheckfingerprints/main.nf @@ -0,0 +1,51 @@ +process PICARD_CROSSCHECKFINGERPRINTS { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + + input: + tuple val(meta), path(input1) + path input2 + path haplotype_map + + output: + tuple val(meta), path("*.crosscheck_metrics.txt"), emit: crosscheck_metrics + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + def input1_string = input1.join(" --INPUT ") + def input2_string = input2 ? "--SECOND_INPUT " + input2.join(" --SECOND_INPUT ") : "" + + def avail_mem = 3 + if (!task.memory) { + log.info '[Picard CrosscheckFingerprints] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + picard \\ + -Xmx${avail_mem}g \\ + CrosscheckFingerprints \\ + $args \\ + --NUM_THREADS ${task.cpus} \\ + --INPUT $input1_string \\ + $input2_string \\ + --HAPLOTYPE_MAP ${haplotype_map} \\ + --OUTPUT ${prefix}.crosscheck_metrics.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + picard: \$( picard CrosscheckFingerprints --version 2>&1 | grep -o 'Version:.*' | cut -f2- -d: ) + END_VERSIONS + """ +} diff --git a/modules/picard/crosscheckfingerprints/meta.yml b/modules/picard/crosscheckfingerprints/meta.yml new file mode 100644 index 00000000..4f2aff5d --- /dev/null +++ b/modules/picard/crosscheckfingerprints/meta.yml @@ -0,0 +1,53 @@ +name: "picard_crosscheckfingerprints" +description: Checks that all data in the set of input files appear to come from the same individual +keywords: + - alignment + - metrics + - statistics + - fingerprint + - bam +tools: + - picard: + description: | + A set of command line tools (in Java) for manipulating high-throughput sequencing (HTS) + data and formats such as SAM/BAM/CRAM and VCF. + homepage: https://broadinstitute.github.io/picard/ + documentation: https://broadinstitute.github.io/picard/ + tool_dev_url: https://github.com/broadinstitute/picard/ + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - input1: + type: file + description: List containing 1 or more bam/vcf files or a file containing filepaths + pattern: "*.{bam,vcf,vcf.gz,txt,fofn}" + - input2: + type: file + description: Optional list containing 1 or more bam/vcf files or a file containing filepaths + pattern: "*.{bam,vcf,vcf.gz,txt,fofn}" + - haplotype_map: + type: file + description: Haplotype map file + pattern: "*.{txt,vcf,vcf.gz}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - crosscheck_metrics: + type: file + description: Metrics created by crosscheckfingerprints + pattern: "*.{crosscheck_metrics.txt}" + +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 2b99f835..c0e84cbc 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1343,6 +1343,10 @@ picard/createsequencedictionary: - modules/picard/createsequencedictionary/** - tests/modules/picard/createsequencedictionary/** +picard/crosscheckfingerprints: + - modules/picard/crosscheckfingerprints/** + - tests/modules/picard/crosscheckfingerprints/** + picard/filtersamreads: - modules/picard/filtersamreads/** - tests/modules/picard/filtersamreads/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 836604b8..1a5c377c 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -161,6 +161,7 @@ params { gnomad_r2_1_1_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/gnomAD.r2.1.1.vcf.gz.tbi" mills_and_1000g_indels_21_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/mills_and_1000G.indels.hg38.vcf.gz" mills_and_1000g_indels_21_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/mills_and_1000G.indels.hg38.vcf.gz.tbi" + haplotype_map = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/germlineresources/haplotype_map.txt" index_salmon = "${test_data_dir}/genomics/homo_sapiens/genome/index/salmon" repeat_expansions = "${test_data_dir}/genomics/homo_sapiens/genome/loci/repeat_expansions.json" diff --git a/tests/modules/picard/crosscheckfingerprints/main.nf b/tests/modules/picard/crosscheckfingerprints/main.nf new file mode 100644 index 00000000..55ddb5c5 --- /dev/null +++ b/tests/modules/picard/crosscheckfingerprints/main.nf @@ -0,0 +1,14 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PICARD_CROSSCHECKFINGERPRINTS } from '../../../../modules/picard/crosscheckfingerprints/main.nf' + +workflow test_picard_crosscheckfingerprints { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)], + ] + PICARD_CROSSCHECKFINGERPRINTS ( input,[], file(params.test_data['homo_sapiens']['genome']['haplotype_map'], checkIfExists: true)) +} diff --git a/tests/modules/picard/crosscheckfingerprints/nextflow.config b/tests/modules/picard/crosscheckfingerprints/nextflow.config new file mode 100644 index 00000000..aa696290 --- /dev/null +++ b/tests/modules/picard/crosscheckfingerprints/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: PICARD_CROSSCHECKFINGERPRINTS {ext.args = "--EXIT_CODE_WHEN_MISMATCH 0"} +} diff --git a/tests/modules/picard/crosscheckfingerprints/test.yml b/tests/modules/picard/crosscheckfingerprints/test.yml new file mode 100644 index 00000000..534c206d --- /dev/null +++ b/tests/modules/picard/crosscheckfingerprints/test.yml @@ -0,0 +1,8 @@ +- name: "picard crosscheckfingerprints" + command: nextflow run ./tests/modules/picard/crosscheckfingerprints -entry test_picard_crosscheckfingerprints -c ./tests/config/nextflow.config -c ./tests/modules/picard/crosscheckfingerprints/nextflow.config + tags: + - "picard" + - "picard/crosscheckfingerprints" + files: + - path: "output/picard/test.crosscheck_metrics.txt" + - path: output/picard/versions.yml From 8dc680d3b334c6622d1edfa3b97d05dd318371e0 Mon Sep 17 00:00:00 2001 From: Michael L Heuer Date: Sat, 9 Apr 2022 11:13:47 -0500 Subject: [PATCH 105/283] Update dsh-bio to version 2.0.8. (#1483) --- modules/dshbio/exportsegments/main.nf | 6 +++--- modules/dshbio/filterbed/main.nf | 6 +++--- modules/dshbio/filtergff3/main.nf | 6 +++--- modules/dshbio/splitbed/main.nf | 6 +++--- modules/dshbio/splitgff3/main.nf | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/modules/dshbio/exportsegments/main.nf b/modules/dshbio/exportsegments/main.nf index 49442f81..03d0e91a 100644 --- a/modules/dshbio/exportsegments/main.nf +++ b/modules/dshbio/exportsegments/main.nf @@ -2,10 +2,10 @@ process DSHBIO_EXPORTSEGMENTS { tag "${meta.id}" label 'process_medium' - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.7" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.8" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.7--hdfd78af_0' : - 'quay.io/biocontainers/dsh-bio:2.0.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.8--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.8--hdfd78af_0' }" input: tuple val(meta), path(gfa) diff --git a/modules/dshbio/filterbed/main.nf b/modules/dshbio/filterbed/main.nf index 7e3da24e..7a0a4d86 100644 --- a/modules/dshbio/filterbed/main.nf +++ b/modules/dshbio/filterbed/main.nf @@ -2,10 +2,10 @@ process DSHBIO_FILTERBED { tag "${meta.id}" label 'process_medium' - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.7" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.8" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.7--hdfd78af_0' : - 'quay.io/biocontainers/dsh-bio:2.0.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.8--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.8--hdfd78af_0' }" input: tuple val(meta), path(bed) diff --git a/modules/dshbio/filtergff3/main.nf b/modules/dshbio/filtergff3/main.nf index 0539bbe0..c6736a49 100644 --- a/modules/dshbio/filtergff3/main.nf +++ b/modules/dshbio/filtergff3/main.nf @@ -2,10 +2,10 @@ process DSHBIO_FILTERGFF3 { tag "${meta.id}" label 'process_medium' - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.7" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.8" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.7--hdfd78af_0' : - 'quay.io/biocontainers/dsh-bio:2.0.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.8--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.8--hdfd78af_0' }" input: tuple val(meta), path(gff3) diff --git a/modules/dshbio/splitbed/main.nf b/modules/dshbio/splitbed/main.nf index 824c7e4d..9268b5dc 100644 --- a/modules/dshbio/splitbed/main.nf +++ b/modules/dshbio/splitbed/main.nf @@ -2,10 +2,10 @@ process DSHBIO_SPLITBED { tag "${meta.id}" label 'process_medium' - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.7" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.8" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.7--hdfd78af_0' : - 'quay.io/biocontainers/dsh-bio:2.0.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.8--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.8--hdfd78af_0' }" input: tuple val(meta), path(bed) diff --git a/modules/dshbio/splitgff3/main.nf b/modules/dshbio/splitgff3/main.nf index 424bc368..db887bd6 100644 --- a/modules/dshbio/splitgff3/main.nf +++ b/modules/dshbio/splitgff3/main.nf @@ -2,10 +2,10 @@ process DSHBIO_SPLITGFF3 { tag "${meta.id}" label 'process_medium' - conda (params.enable_conda ? "bioconda::dsh-bio=2.0.7" : null) + conda (params.enable_conda ? "bioconda::dsh-bio=2.0.8" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.7--hdfd78af_0' : - 'quay.io/biocontainers/dsh-bio:2.0.7--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/dsh-bio:2.0.8--hdfd78af_0' : + 'quay.io/biocontainers/dsh-bio:2.0.8--hdfd78af_0' }" input: tuple val(meta), path(gff3) From 897c33d5da084b61109500ee44c01da2d3e4e773 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Mon, 11 Apr 2022 14:26:28 +0200 Subject: [PATCH 106/283] Samtools version update (#1507) * Fix typo * update version to 1.15.1 * Fix md5sums * update mulled containers * update md5sums * update md5sums --- modules/bbmap/align/main.nf | 6 +++--- modules/bbmap/pileup/main.nf | 6 +++--- modules/bowtie/align/main.nf | 6 +++--- modules/bowtie2/align/main.nf | 6 +++--- modules/bwa/mem/main.nf | 6 +++--- modules/bwa/sampe/main.nf | 6 +++--- modules/bwa/samse/main.nf | 6 +++--- modules/bwamem2/mem/main.nf | 6 +++--- modules/chromap/chromap/main.nf | 6 +++--- modules/dragmap/align/main.nf | 6 +++--- modules/hisat2/align/main.nf | 6 +++--- modules/qualimap/bamqccram/main.nf | 6 +++--- modules/samblaster/main.nf | 6 +++--- modules/samtools/ampliconclip/main.nf | 6 +++--- modules/samtools/bam2fq/main.nf | 6 +++--- modules/samtools/depth/main.nf | 6 +++--- modules/samtools/faidx/main.nf | 6 +++--- modules/samtools/fastq/main.nf | 6 +++--- modules/samtools/fixmate/main.nf | 6 +++--- modules/samtools/flagstat/main.nf | 6 +++--- modules/samtools/idxstats/main.nf | 6 +++--- modules/samtools/index/main.nf | 6 +++--- modules/samtools/merge/main.nf | 6 +++--- modules/samtools/mpileup/main.nf | 7 +++---- modules/samtools/sort/main.nf | 6 +++--- modules/samtools/stats/main.nf | 6 +++--- modules/samtools/view/main.nf | 6 +++--- modules/star/genomegenerate/main.nf | 6 +++--- modules/yara/mapper/main.nf | 6 +++--- tests/modules/bbmap/align/test.yml | 8 ++++---- tests/modules/bbmap/pileup/test.yml | 2 +- tests/modules/bwa/sampe/test.yml | 2 +- 
tests/modules/bwa/samse/test.yml | 2 +- tests/modules/chromap/chromap/test.yml | 5 +---- tests/modules/samblaster/test.yml | 2 +- tests/modules/samtools/ampliconclip/test.yml | 10 +++++----- tests/modules/samtools/faidx/test.yml | 1 - tests/modules/samtools/fixmate/test.yml | 2 +- tests/modules/samtools/mpileup/test.yml | 2 -- tests/modules/samtools/sort/test.yml | 2 +- tests/modules/samtools/stats/test.yml | 4 ++-- 41 files changed, 105 insertions(+), 112 deletions(-) diff --git a/modules/bbmap/align/main.nf b/modules/bbmap/align/main.nf index 914399c5..aa1fbe1a 100644 --- a/modules/bbmap/align/main.nf +++ b/modules/bbmap/align/main.nf @@ -2,10 +2,10 @@ process BBMAP_ALIGN { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) + conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.15.1 pigz=2.6" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : - 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:2fee0e0facec1dfe32a1ee4aa516aef7d0296ebf-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:2fee0e0facec1dfe32a1ee4aa516aef7d0296ebf-0' }" input: tuple val(meta), path(fastq) diff --git a/modules/bbmap/pileup/main.nf b/modules/bbmap/pileup/main.nf index 8d424bc2..1f34efc5 100644 --- a/modules/bbmap/pileup/main.nf +++ b/modules/bbmap/pileup/main.nf @@ -2,10 +2,10 @@ process BBMAP_PILEUP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.13 pigz=2.6" : null) + conda (params.enable_conda ? "bioconda::bbmap=38.92 bioconda::samtools=1.15.1 pigz=2.6" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' : - 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:f5f55fc5623bb7b3f725e8d2f86bedacfd879510-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:2fee0e0facec1dfe32a1ee4aa516aef7d0296ebf-0' : + 'quay.io/biocontainers/mulled-v2-008daec56b7aaf3f162d7866758142b9f889d690:2fee0e0facec1dfe32a1ee4aa516aef7d0296ebf-0' }" input: tuple val(meta), path(bam) diff --git a/modules/bowtie/align/main.nf b/modules/bowtie/align/main.nf index ba82b67d..d2cba0e4 100644 --- a/modules/bowtie/align/main.nf +++ b/modules/bowtie/align/main.nf @@ -2,10 +2,10 @@ process BOWTIE_ALIGN { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? 'bioconda::bowtie=1.3.0 bioconda::samtools=1.11' : null) + conda (params.enable_conda ? 'bioconda::bowtie=1.3.0 bioconda::samtools=1.15.1' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' : - 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:9e14e16c284d6860574cf5b624bbc44c793cb024-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:676c5bcfe34af6097728fea60fb7ea83f94a4a5f-0' : + 'quay.io/biocontainers/mulled-v2-ffbf83a6b0ab6ec567a336cf349b80637135bca3:676c5bcfe34af6097728fea60fb7ea83f94a4a5f-0' }" input: tuple val(meta), path(reads) diff --git a/modules/bowtie2/align/main.nf b/modules/bowtie2/align/main.nf index 7e8a9659..44ce76ca 100644 --- a/modules/bowtie2/align/main.nf +++ b/modules/bowtie2/align/main.nf @@ -2,10 +2,10 @@ process BOWTIE2_ALIGN { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4 bioconda::samtools=1.14 conda-forge::pigz=2.6' : null) + conda (params.enable_conda ? 'bioconda::bowtie2=2.4.4 bioconda::samtools=1.15.1 conda-forge::pigz=2.6' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:4d235f41348a00533f18e47c9669f1ecb327f629-0' : - 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:4d235f41348a00533f18e47c9669f1ecb327f629-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:1744f68fe955578c63054b55309e05b41c37a80d-0' : + 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:1744f68fe955578c63054b55309e05b41c37a80d-0' }" input: tuple val(meta), path(reads) diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index 27ea6f42..ffa51908 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -2,10 +2,10 @@ process BWA_MEM { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' }" input: tuple val(meta), path(reads) diff --git a/modules/bwa/sampe/main.nf b/modules/bwa/sampe/main.nf index 73345d81..cfe9529d 100644 --- a/modules/bwa/sampe/main.nf +++ b/modules/bwa/sampe/main.nf @@ -2,10 +2,10 @@ process BWA_SAMPE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' }" input: tuple val(meta), path(reads), path(sai) diff --git a/modules/bwa/samse/main.nf b/modules/bwa/samse/main.nf index 2c327d99..fed412f2 100644 --- a/modules/bwa/samse/main.nf +++ b/modules/bwa/samse/main.nf @@ -2,10 +2,10 @@ process BWA_SAMSE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::bwa=0.7.17 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' : - 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:c56a3aabc8d64e52d5b9da1e8ecec2031668596d-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' : + 'quay.io/biocontainers/mulled-v2-fe8faa35dbf6dc65a0f7f5d4ea12e31a79f73e40:8110a70be2bfe7f75a2ea7f2a89cda4cc7732095-0' }" input: tuple val(meta), path(reads), path(sai) diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index e3a3d164..50d84cb0 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -2,10 +2,10 @@ process BWAMEM2_MEM { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::bwa-mem2=2.2.1 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' : - 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:8ee25ae85d7a2bacac3e3139db209aff3d605a18-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:38aed4501da19db366dc7c8d52d31d94e760cfaf-0' : + 'quay.io/biocontainers/mulled-v2-e5d375990341c5aef3c9aff74f96f66f65375ef6:38aed4501da19db366dc7c8d52d31d94e760cfaf-0' }" input: tuple val(meta), path(reads) diff --git a/modules/chromap/chromap/main.nf b/modules/chromap/chromap/main.nf index bf3d1234..137f0340 100644 --- a/modules/chromap/chromap/main.nf +++ b/modules/chromap/chromap/main.nf @@ -2,10 +2,10 @@ process CHROMAP_CHROMAP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::chromap=0.2.1 bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::chromap=0.2.1 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:bd74d08a359024829a7aec1638a28607bbcd8a58-0' : - 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:bd74d08a359024829a7aec1638a28607bbcd8a58-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:963e4fe6a85c548a4018585660aed79780a175d3-0' : + 'quay.io/biocontainers/mulled-v2-1f09f39f20b1c4ee36581dc81cc323c70e661633:963e4fe6a85c548a4018585660aed79780a175d3-0' }" input: tuple val(meta), path(reads) diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf index ee94a9a8..b7f1e33b 100644 --- a/modules/dragmap/align/main.nf +++ b/modules/dragmap/align/main.nf @@ -2,10 +2,10 @@ process DRAGMAP_ALIGN { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::dragmap=1.2.1 bioconda::samtools=1.14 conda-forge::pigz=2.3.4" : null) + conda (params.enable_conda ? "bioconda::dragmap=1.2.1 bioconda::samtools=1.15.1 conda-forge::pigz=2.3.4" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0': - 'quay.io/biocontainers/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:f7aad9060cde739c95685fc5ff6d6f7e3ec629c8-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:5ebebbc128cd624282eaa37d2c7fe01505a91a69-0': + 'quay.io/biocontainers/mulled-v2-580d344d9d4a496cd403932da8765f9e0187774d:5ebebbc128cd624282eaa37d2c7fe01505a91a69-0' }" input: tuple val(meta), path(reads) diff --git a/modules/hisat2/align/main.nf b/modules/hisat2/align/main.nf index 7f680018..0a45ce72 100644 --- a/modules/hisat2/align/main.nf +++ b/modules/hisat2/align/main.nf @@ -4,10 +4,10 @@ process HISAT2_ALIGN { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::hisat2=2.2.0 bioconda::samtools=1.10" : null) + conda (params.enable_conda ? "bioconda::hisat2=2.2.0 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' : - 'quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:2880dd9d8ad0a7b221d4eacda9a818e92983128d-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:0e773bb207600fcb4d38202226eb20a33c7909b6-0' : + 'quay.io/biocontainers/mulled-v2-a97e90b3b802d1da3d6958e0867610c718cb5eb1:0e773bb207600fcb4d38202226eb20a33c7909b6-0' }" input: tuple val(meta), path(reads) diff --git a/modules/qualimap/bamqccram/main.nf b/modules/qualimap/bamqccram/main.nf index ab3fd51a..e136b8e2 100644 --- a/modules/qualimap/bamqccram/main.nf +++ b/modules/qualimap/bamqccram/main.nf @@ -2,10 +2,10 @@ process QUALIMAP_BAMQCCRAM { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::qualimap=2.2.2d bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' : - 'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:9838874d42d4477d5042782ee019cec9854da7d5-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:61f6d4658ac88635fc37623af50bba77561988ab-0' : + 'quay.io/biocontainers/mulled-v2-d3934ca6bb4e61334891ffa2e9a4c87a530e3188:61f6d4658ac88635fc37623af50bba77561988ab-0' }" input: tuple val(meta), path(cram), path(crai) diff --git a/modules/samblaster/main.nf b/modules/samblaster/main.nf index c881389a..225c7152 100644 --- a/modules/samblaster/main.nf +++ b/modules/samblaster/main.nf @@ -2,10 +2,10 @@ process SAMBLASTER { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.14" : null) + conda (params.enable_conda ? "bioconda::samblaster=0.1.26 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' : - 'quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:ba4a02b56f3e524a6e006bcd99fe8cc1d7fe09eb-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:fff03944e664bbf9a139f7b174b9cb2d4163271a-0' : + 'quay.io/biocontainers/mulled-v2-19fa9f1a5c3966b63a24166365e81da35738c5ab:fff03944e664bbf9a139f7b174b9cb2d4163271a-0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/ampliconclip/main.nf b/modules/samtools/ampliconclip/main.nf index 4e76b1b4..2b90c953 100644 --- a/modules/samtools/ampliconclip/main.nf +++ b/modules/samtools/ampliconclip/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_AMPLICONCLIP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/bam2fq/main.nf b/modules/samtools/bam2fq/main.nf index 8dd64dc0..5d6aa79d 100644 --- a/modules/samtools/bam2fq/main.nf +++ b/modules/samtools/bam2fq/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_BAM2FQ { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(inputbam) diff --git a/modules/samtools/depth/main.nf b/modules/samtools/depth/main.nf index 4870b2d8..e508a5f7 100644 --- a/modules/samtools/depth/main.nf +++ b/modules/samtools/depth/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_DEPTH { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/faidx/main.nf b/modules/samtools/faidx/main.nf index 053279ff..fdce7d9b 100644 --- a/modules/samtools/faidx/main.nf +++ b/modules/samtools/faidx/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FAIDX { tag "$fasta" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(fasta) diff --git a/modules/samtools/fastq/main.nf b/modules/samtools/fastq/main.nf index 6408d4a4..8d9b9d08 100644 --- a/modules/samtools/fastq/main.nf +++ b/modules/samtools/fastq/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FASTQ { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/fixmate/main.nf b/modules/samtools/fixmate/main.nf index 14c9db9f..f5e16f67 100644 --- a/modules/samtools/fixmate/main.nf +++ b/modules/samtools/fixmate/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FIXMATE { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/flagstat/main.nf b/modules/samtools/flagstat/main.nf index 9e3440ac..b87b2108 100644 --- a/modules/samtools/flagstat/main.nf +++ b/modules/samtools/flagstat/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_FLAGSTAT { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/samtools/idxstats/main.nf b/modules/samtools/idxstats/main.nf index 7d5cee17..a49ff35f 100644 --- a/modules/samtools/idxstats/main.nf +++ b/modules/samtools/idxstats/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_IDXSTATS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/samtools/index/main.nf b/modules/samtools/index/main.nf index fff6e1b8..e04e63e8 100644 --- a/modules/samtools/index/main.nf +++ b/modules/samtools/index/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_INDEX { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(input) diff --git a/modules/samtools/merge/main.nf b/modules/samtools/merge/main.nf index 9f962a4b..bbf7e8fb 100644 --- a/modules/samtools/merge/main.nf +++ b/modules/samtools/merge/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_MERGE { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(input_files) diff --git a/modules/samtools/mpileup/main.nf b/modules/samtools/mpileup/main.nf index 474a2492..fcd498be 100644 --- a/modules/samtools/mpileup/main.nf +++ b/modules/samtools/mpileup/main.nf @@ -2,11 +2,10 @@ process SAMTOOLS_MPILEUP { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" - + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(input), path(intervals) path fasta diff --git a/modules/samtools/sort/main.nf b/modules/samtools/sort/main.nf index ba46f0c9..b4fc1cbe 100644 --- a/modules/samtools/sort/main.nf +++ b/modules/samtools/sort/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_SORT { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(bam) diff --git a/modules/samtools/stats/main.nf b/modules/samtools/stats/main.nf index 85cb64f3..bbdc3240 100644 --- a/modules/samtools/stats/main.nf +++ b/modules/samtools/stats/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_STATS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(input), path(input_index) diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 75aad063..5f14fbbf 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -2,10 +2,10 @@ process SAMTOOLS_VIEW { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: tuple val(meta), path(input) diff --git a/modules/star/genomegenerate/main.nf b/modules/star/genomegenerate/main.nf index 50d280dd..e5568f1d 100644 --- a/modules/star/genomegenerate/main.nf +++ b/modules/star/genomegenerate/main.nf @@ -3,10 +3,10 @@ process STAR_GENOMEGENERATE { label 'process_high' // Note: 2.7X indices incompatible with AWS iGenomes. - conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.13 conda-forge::gawk=5.1.0" : null) + conda (params.enable_conda ? "bioconda::star=2.7.9a bioconda::samtools=1.15.1 conda-forge::gawk=5.1.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' : - 'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:a7908dfb0485a80ca94e4d17b0ac991532e4e989-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' : + 'quay.io/biocontainers/mulled-v2-1fa26d1ce03c295fe2fdcf85831a92fbcbd7e8c2:1c4c32d87798d425c970ececfbadd155e7560277-0' }" input: path fasta diff --git a/modules/yara/mapper/main.nf b/modules/yara/mapper/main.nf index 15b39236..9497fe86 100644 --- a/modules/yara/mapper/main.nf +++ b/modules/yara/mapper/main.nf @@ -2,10 +2,10 @@ process YARA_MAPPER { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::yara=1.0.2 bioconda::samtools=1.12" : null) + conda (params.enable_conda ? "bioconda::yara=1.0.2 bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' : - 'quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:f794a548b8692f29264c8984ff116c2141b90d9e-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:d6c969c1e20cc02a9234961c07a24bb0887f05ea-0' : + 'quay.io/biocontainers/mulled-v2-f13549097a0d1ca36f9d4f017636fb3609f6c083:d6c969c1e20cc02a9234961c07a24bb0887f05ea-0' }" input: tuple val(meta), path(reads) diff --git a/tests/modules/bbmap/align/test.yml b/tests/modules/bbmap/align/test.yml index d9f9a862..aa3a9c1c 100644 --- a/tests/modules/bbmap/align/test.yml +++ b/tests/modules/bbmap/align/test.yml @@ -5,7 +5,7 @@ - bbmap/align files: - path: output/bbmap/test.bam - md5sum: e0ec7f1eec537acf146fac1cbdd868d1 + md5sum: 8549040067d23949bddb6fe2ad211c92 - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref @@ -15,7 +15,7 @@ - bbmap/align files: - path: output/bbmap/test.bam - md5sum: 345a72a0d58366d75dd263b107caa460 + md5sum: aeb842491ca6c7806aa7103b5223620f - path: output/bbmap/test.bbmap.log - name: bbmap align single end index ref @@ -25,7 +25,7 @@ - bbmap/align files: - path: output/bbmap/test.bam - md5sum: 95f690636581ce9b27cf8568c715ae4d + md5sum: b6a41cb344a343d46244d8f94eb66ec0 - path: output/bbmap/test.bbmap.log - name: bbmap align paired end index ref pigz @@ -35,5 +35,5 @@ - bbmap/align files: - path: output/bbmap/test.bam - md5sum: 441c4f196b9a82c7b224903538064308 + md5sum: 74944e24acccb8c5abc316dcdd623c84 - path: output/bbmap/test.bbmap.log diff --git a/tests/modules/bbmap/pileup/test.yml b/tests/modules/bbmap/pileup/test.yml index 84814a7a..272cdcf7 100644 --- a/tests/modules/bbmap/pileup/test.yml +++ b/tests/modules/bbmap/pileup/test.yml @@ -9,4 +9,4 @@ - path: "output/bbmap/test.coverage.hist.txt" md5sum: 96915920ef42ddc9483457dd4585a088 - path: output/bbmap/versions.yml - md5sum: 894acc38bdc167dc22851df15e5a8453 + md5sum: e2bc51873b24e7fea269b7c1501de060 diff --git a/tests/modules/bwa/sampe/test.yml b/tests/modules/bwa/sampe/test.yml index bf221ebc..c3eb42f8 100644 --- a/tests/modules/bwa/sampe/test.yml +++ b/tests/modules/bwa/sampe/test.yml @@ -5,4 +5,4 @@ - bwa/sampe files: - path: output/bwa/test.bam - md5sum: 01d1d71c88b6de07ed51d1d06e9e970b + md5sum: 67528d633a1a78e3d0e8d1486c1a960a diff --git a/tests/modules/bwa/samse/test.yml b/tests/modules/bwa/samse/test.yml index c45f69dc..3af39258 100644 --- a/tests/modules/bwa/samse/test.yml +++ b/tests/modules/bwa/samse/test.yml @@ -5,4 +5,4 @@ - bwa/samse files: - path: output/bwa/test.bam - md5sum: ddfa4a8f6b65d44704a2d9528abc7e79 + md5sum: 9a0ca9678a03e6fa4bda459c04c99bd6 diff --git a/tests/modules/chromap/chromap/test.yml b/tests/modules/chromap/chromap/test.yml index d76370b2..d089922a 100644 --- a/tests/modules/chromap/chromap/test.yml +++ b/tests/modules/chromap/chromap/test.yml @@ -8,7 +8,6 @@ - path: output/chromap/test.bed.gz md5sum: 25e40bde24c7b447292cd68573728694 - path: output/chromap/versions.yml - md5sum: d24cfc35ad958206a5bc5694221b4fae - name: chromap chromap test_chromap_chromap_paired_end command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -20,7 +19,6 @@ - path: output/chromap/test.bed.gz md5sum: 7cdc8448882b75811e0c784f5f20aef2 - path: output/chromap/versions.yml - md5sum: 
68ffe268a9d460956de4aad2a55ffd68 - name: chromap chromap test_chromap_chromap_paired_bam command: nextflow run ./tests/modules/chromap/chromap -entry test_chromap_chromap_paired_bam -c ./tests/config/nextflow.config -c ./tests/modules/chromap/chromap/nextflow.config @@ -30,6 +28,5 @@ files: - path: output/chromap/genome.index - path: output/chromap/test.bam - md5sum: df467417407408e42992dc3dd15b22f5 + md5sum: 0cb45628d1abe4b4359650040c501aef - path: output/chromap/versions.yml - md5sum: ea732b4c6f1312d09745b66c3963dd3f diff --git a/tests/modules/samblaster/test.yml b/tests/modules/samblaster/test.yml index acc6d0f0..8a87bd50 100644 --- a/tests/modules/samblaster/test.yml +++ b/tests/modules/samblaster/test.yml @@ -4,4 +4,4 @@ - samblaster files: - path: output/samblaster/test.processed.bam - md5sum: 950f23d85f75be1cf872f45c0144bdf4 + md5sum: 3009528be9f69e7fc8951921583b0016 diff --git a/tests/modules/samtools/ampliconclip/test.yml b/tests/modules/samtools/ampliconclip/test.yml index e9947562..cfc10dd1 100644 --- a/tests/modules/samtools/ampliconclip/test.yml +++ b/tests/modules/samtools/ampliconclip/test.yml @@ -5,7 +5,7 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 5d0e8bc9e6059ef3a63ee6328a3935c7 + md5sum: d270363a7fb3d96be2df211099efc75b - name: samtools ampliconclip no stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_no_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config @@ -14,9 +14,9 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 2c998295d624c59620b7ffdb0cc080e2 + md5sum: e7c4e64c259212e1670f6de96a5549b4 - path: output/samtools/test.cliprejects.bam - md5sum: f3ebba8d91ad29cc4d2d00943e6f6bab + md5sum: b7c057b11950c2271a0c92236bee94b7 - name: samtools ampliconclip with stats with rejects command: nextflow run ./tests/modules/samtools/ampliconclip -entry test_samtools_ampliconclip_with_stats_with_rejects -c ./tests/config/nextflow.config -c ./tests/modules/samtools/ampliconclip/nextflow.config @@ -25,8 +25,8 @@ - samtools/ampliconclip files: - path: output/samtools/test.bam - md5sum: 87882973b425ab27aad6ef18faf11f25 + md5sum: e75992d4ff69cbaed9a089231be86b5e - path: output/samtools/test.cliprejects.bam - md5sum: eb5e186e1a69864dc2e99a290f02ff78 + md5sum: 729f03e7a2801d2c56c32bef8f3d6ead - path: output/samtools/test.clipstats.txt md5sum: fc23355e1743d47f2541f2cb1a7a0cda diff --git a/tests/modules/samtools/faidx/test.yml b/tests/modules/samtools/faidx/test.yml index 1a49a0d5..346d5a0b 100644 --- a/tests/modules/samtools/faidx/test.yml +++ b/tests/modules/samtools/faidx/test.yml @@ -7,4 +7,3 @@ - path: output/samtools/genome.fasta.fai md5sum: 9da2a56e2853dc8c0b86a9e7229c9fe5 - path: output/samtools/versions.yml - md5sum: 6a16b2148a0ab43e6d0506056e6a0409 diff --git a/tests/modules/samtools/fixmate/test.yml b/tests/modules/samtools/fixmate/test.yml index 59cd6b41..c233f947 100644 --- a/tests/modules/samtools/fixmate/test.yml +++ b/tests/modules/samtools/fixmate/test.yml @@ -5,4 +5,4 @@ - samtools/fixmate files: - path: output/samtools/test.bam - md5sum: c7f574bb0c469e0ccfecb6b7210e03c5 + md5sum: 13805ea1a9212496a8cb4ce395b25119 diff --git a/tests/modules/samtools/mpileup/test.yml b/tests/modules/samtools/mpileup/test.yml index 405263d1..c3d794d0 100644 --- a/tests/modules/samtools/mpileup/test.yml +++ b/tests/modules/samtools/mpileup/test.yml @@ -7,7 +7,6 @@ - path: output/samtools/test.mpileup md5sum: 
958e6bead4103d72026f80153b6b5150 - path: output/samtools/versions.yml - md5sum: 26350e1e145451f0b807911db029861e - name: samtools mpileup test_samtools_mpileup_intervals command: nextflow run tests/modules/samtools/mpileup -entry test_samtools_mpileup_intervals -c tests/config/nextflow.config @@ -18,4 +17,3 @@ - path: output/samtools/test.mpileup md5sum: 958e6bead4103d72026f80153b6b5150 - path: output/samtools/versions.yml - md5sum: 11d8118a558efb9db6798453862d719c diff --git a/tests/modules/samtools/sort/test.yml b/tests/modules/samtools/sort/test.yml index 4535dd09..e7bfd598 100644 --- a/tests/modules/samtools/sort/test.yml +++ b/tests/modules/samtools/sort/test.yml @@ -5,4 +5,4 @@ - samtools/sort files: - path: output/samtools/test.sorted.bam - md5sum: a73238d6b896a3a946025d6b13fe9525 + md5sum: f00f5d392fd5c531e1fd528d9f57b32b diff --git a/tests/modules/samtools/stats/test.yml b/tests/modules/samtools/stats/test.yml index 44b7ef8c..304619ee 100644 --- a/tests/modules/samtools/stats/test.yml +++ b/tests/modules/samtools/stats/test.yml @@ -5,7 +5,7 @@ - samtools files: - path: output/samtools/test.paired_end.sorted.bam.stats - md5sum: 6e3ca28b3e98dade14992dd7ea5fc886 + md5sum: c1e9ad551281b0bca32be1c832d125af - name: samtools stats test_samtools_stats_cram command: nextflow run ./tests/modules/samtools/stats -entry test_samtools_stats_cram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/stats/nextflow.config @@ -14,4 +14,4 @@ - samtools files: - path: output/samtools/test.paired_end.recalibrated.sorted.cram.stats - md5sum: 985455b573444c3743510d603ed41f8c + md5sum: 103cd7b19743c42dab9ce570144c6f36 From 2d38566eca4cc15142b2ffa7c11837569b39aece Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Tue, 12 Apr 2022 08:35:36 +0200 Subject: [PATCH 107/283] Add MEGAN/RMA2INFO (#1513) * fix: remove left-over unnecessary code * Add megan/rma2info * Attempt at fixing test * Right yml formatting * Get the versios reporting correct --- modules/megan/rma2info/main.nf | 38 +++++++++++++++ modules/megan/rma2info/meta.yml | 51 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/megan/rma2info/main.nf | 16 ++++++ tests/modules/megan/rma2info/nextflow.config | 9 ++++ tests/modules/megan/rma2info/test.yml | 12 +++++ 6 files changed, 130 insertions(+) create mode 100644 modules/megan/rma2info/main.nf create mode 100644 modules/megan/rma2info/meta.yml create mode 100644 tests/modules/megan/rma2info/main.nf create mode 100644 tests/modules/megan/rma2info/nextflow.config create mode 100644 tests/modules/megan/rma2info/test.yml diff --git a/modules/megan/rma2info/main.nf b/modules/megan/rma2info/main.nf new file mode 100644 index 00000000..80d1975d --- /dev/null +++ b/modules/megan/rma2info/main.nf @@ -0,0 +1,38 @@ +process MEGAN_RMA2INFO { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::megan=6.21.7" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/megan:6.21.7--h9ee0642_0': + 'quay.io/biocontainers/megan:6.21.7--h9ee0642_0' }" + + input: + tuple val(meta), path(rma6) + val(megan_summary) + + output: + tuple val(meta), path("*.txt.gz") , emit: txt + tuple val(meta), path("*.megan"), optional: true, emit: megan_summary + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def summary = megan_summary ? "-es ${prefix}.megan" : "" + """ + rma2info \\ + -i ${rma6} \\ + -o ${prefix}.txt.gz \\ + ${summary} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + megan: \$(echo \$(rma2info 2>&1) | grep version | sed 's/.*version //g;s/, built.*//g') + END_VERSIONS + """ +} diff --git a/modules/megan/rma2info/meta.yml b/modules/megan/rma2info/meta.yml new file mode 100644 index 00000000..0f2d5a9b --- /dev/null +++ b/modules/megan/rma2info/meta.yml @@ -0,0 +1,51 @@ +name: "megan_rma2info" +description: Analyses an RMA file and exports information in text format +keywords: + - megan + - rma6 + - classification + - conversion +tools: + - "megan": + description: "A tool for studying the taxonomic content of a set of DNA reads" + homepage: "https://uni-tuebingen.de/fakultaeten/mathematisch-naturwissenschaftliche-fakultaet/fachbereiche/informatik/lehrstuehle/algorithms-in-bioinformatics/software/megan6/" + documentation: "https://software-ab.informatik.uni-tuebingen.de/download/megan6/welcome.html" + tool_dev_url: "https://github.com/husonlab/megan-ce" + doi: "10.1371/journal.pcbi.1004957" + licence: "['GPL >=3']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - rma6: + type: file + description: RMA6 file from MEGAN or MALT + pattern: "*.rma6" + - megan_summary: + type: boolean + description: Specify whether to generate an MEGAN summary file + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - txt: + type: file + description: Compressed text file + pattern: "*.txt.gz" + - megan_summary: + type: file + description: Optionally generated MEGAN summary file + pattern: "*.megan" + +authors: + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c0e84cbc..94bf4e91 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1138,6 +1138,10 @@ megahit: - modules/megahit/** - tests/modules/megahit/** +megan/rma2info: + - modules/megan/rma2info/** + - tests/modules/megan/rma2info/** + meningotype: - modules/meningotype/** - tests/modules/meningotype/** diff --git a/tests/modules/megan/rma2info/main.nf b/tests/modules/megan/rma2info/main.nf new file mode 100644 index 00000000..edbe9a49 --- /dev/null +++ b/tests/modules/megan/rma2info/main.nf @@ -0,0 +1,16 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { MEGAN_RMA2INFO } from '../../../../modules/megan/rma2info/main.nf' + +workflow test_megan_rma2info { + + input = [ + [ id:'test', single_end:false ], // meta map + file('https://github.com/nf-core/test-datasets/raw/a7e61654553887475a2f7178108587ecd9b54608/data/delete_me/malt/test.rma6', checkIfExists: true) + ] + megan_summary = true + + MEGAN_RMA2INFO ( input, megan_summary ) +} diff --git a/tests/modules/megan/rma2info/nextflow.config b/tests/modules/megan/rma2info/nextflow.config new file mode 100644 index 00000000..3fd8dcdb --- /dev/null +++ b/tests/modules/megan/rma2info/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: MEGAN_RMA2INFO { + ext.args = "-c2c Taxonomy" + } + +} diff --git a/tests/modules/megan/rma2info/test.yml b/tests/modules/megan/rma2info/test.yml new file mode 100644 index 00000000..dc845bea --- /dev/null +++ b/tests/modules/megan/rma2info/test.yml @@ -0,0 +1,12 @@ +- name: megan rma2info test_megan_rma2info + command: nextflow run tests/modules/megan/rma2info -entry test_megan_rma2info -c tests/config/nextflow.config + tags: + - megan + - megan/rma2info + files: + - path: output/megan/test.megan + contains: + - "@Creator" + - path: output/megan/test.txt.gz + md5sum: 5c3b876aa0abef12158bcd7c3702740f + - path: output/megan/versions.yml From ffedf09b6e84b479c9c901274f74bb33f3777243 Mon Sep 17 00:00:00 2001 From: FriederikeHanssen Date: Tue, 12 Apr 2022 10:40:06 +0200 Subject: [PATCH 108/283] Revert manta changes (#1518) * Fix typo * Revert to have target bed matched with sample info * Add comment on design decision --- modules/manta/germline/main.nf | 5 ++--- tests/modules/manta/germline/main.nf | 26 +++++++++++--------------- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/modules/manta/germline/main.nf b/modules/manta/germline/main.nf index 5ddba51b..c680dc9d 100644 --- a/modules/manta/germline/main.nf +++ b/modules/manta/germline/main.nf @@ -8,11 +8,10 @@ process MANTA_GERMLINE { 'quay.io/biocontainers/manta:1.6.0--h9ee0642_1' }" input: - tuple val(meta), path(input), path(index) + //Matching the target bed with the input sample allows to parallelize the same sample run across different intervals or a single bed file + tuple val(meta), path(input), path(index), path(target_bed), path(target_bed_tbi) path fasta path fasta_fai - tuple path(target_bed), path(target_bed_tbi) - output: tuple val(meta), 
path("*candidate_small_indels.vcf.gz") , emit: candidate_small_indels_vcf diff --git a/tests/modules/manta/germline/main.nf b/tests/modules/manta/germline/main.nf index bad62629..5f6687b2 100644 --- a/tests/modules/manta/germline/main.nf +++ b/tests/modules/manta/germline/main.nf @@ -8,29 +8,27 @@ workflow test_manta_germline { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)], + [],[] ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [[],[]] - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } workflow test_manta_germline_target_bed { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)] - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [ + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true)], file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } workflow test_manta_germline_target_bed_jointcalling { @@ -39,14 +37,12 @@ workflow test_manta_germline_target_bed_jointcalling { [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram'], checkIfExists: true)], [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),] - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - bed = [ + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_cram_crai'], checkIfExists: true),], file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true), file(params.test_data['homo_sapiens']['genome']['genome_bed_gz_tbi'], checkIfExists: true), ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - MANTA_GERMLINE ( input, fasta, fai, bed ) + MANTA_GERMLINE ( input, fasta, fai ) } From 09125979cce65350cf50948cf4233a6ce2804bec Mon Sep 17 00:00:00 2001 From: Matthias De Smet 
<11850640+matthdsm@users.noreply.github.com> Date: Tue, 12 Apr 2022 11:02:10 +0200 Subject: [PATCH 109/283] Tool/snap aligner paired (#1509) * first commit * edit main.nf * edit tests * run prettier * fix test * indent script * Update modules/snapaligner/paired/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/snapaligner/paired/meta.yml Co-authored-by: James A. Fellows Yates * fix version nr * update meta * fix versions Co-authored-by: James A. Fellows Yates --- modules/snapaligner/paired/main.nf | 41 ++++++++++++++++ modules/snapaligner/paired/meta.yml | 48 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/snapaligner/paired/main.nf | 17 +++++++ .../snapaligner/paired/nextflow.config | 5 ++ tests/modules/snapaligner/paired/test.yml | 9 ++++ 6 files changed, 124 insertions(+) create mode 100644 modules/snapaligner/paired/main.nf create mode 100644 modules/snapaligner/paired/meta.yml create mode 100644 tests/modules/snapaligner/paired/main.nf create mode 100644 tests/modules/snapaligner/paired/nextflow.config create mode 100644 tests/modules/snapaligner/paired/test.yml diff --git a/modules/snapaligner/paired/main.nf b/modules/snapaligner/paired/main.nf new file mode 100644 index 00000000..57044893 --- /dev/null +++ b/modules/snapaligner/paired/main.nf @@ -0,0 +1,41 @@ +process SNAPALIGNER_PAIRED { + tag '$meta.id' + label 'process_high' + + conda (params.enable_conda ? "bioconda::snap-aligner=2.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snap-aligner:2.0.1--hd03093a_1': + 'quay.io/biocontainers/snap-aligner:2.0.1--hd03093a_1' }" + + input: + tuple val(meta), path(reads) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + """ + mkdir -p index + mv $index index/ + + snap-aligner paired \\ + index \\ + ${reads.join(" ")} \\ + -o -bam ${prefix}.bam \\ + -t ${task.cpus} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snapaligner: \$(snap-aligner 2>&1| head -n 1 | sed 's/^.*version //;s/.\$//') + END_VERSIONS + """ +} diff --git a/modules/snapaligner/paired/meta.yml b/modules/snapaligner/paired/meta.yml new file mode 100644 index 00000000..b19e0174 --- /dev/null +++ b/modules/snapaligner/paired/meta.yml @@ -0,0 +1,48 @@ +name: "snapaligner_paired" +description: Performs paired end fastq alignment to a fasta reference using SNAP +keywords: + - alignment + - map + - fastq + - bam + - sam +tools: + - "snapaligner": + description: "Scalable Nucleotide Alignment Program -- a fast and accurate read aligner for high-throughput sequencing data" + homepage: "http://snap.cs.berkeley.edu" + documentation: "https://1drv.ms/b/s!AhuEg_0yZD86hcpblUt-muHKYsG8fA?e=R8ogug" + tool_dev_url: "https://github.com/amplab/snap" + doi: "10.1101/2021.11.23.469039" + licence: "['Apache v2']" +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: List of input fastq files of size 2 for fastq or 1 for bam + pattern: "*.{fastq.gz,fq.gz,fastq,fq,bam}" + - index: + type: file + description: List of SNAP genome index files + pattern: "{Genome,GenomeIndex,GenomeIndexHash,OverflowTable}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Aligned BAM file + pattern: "*.{bam}" + +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 94bf4e91..b195968f 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1675,6 +1675,10 @@ snapaligner/index: - modules/snapaligner/index/** - tests/modules/snapaligner/index/** +snapaligner/paired: + - modules/snapaligner/paired/** + - tests/modules/snapaligner/paired/** + snpdists: - modules/snpdists/** - tests/modules/snpdists/** diff --git a/tests/modules/snapaligner/paired/main.nf b/tests/modules/snapaligner/paired/main.nf new file mode 100644 index 00000000..b25ca8c2 --- /dev/null +++ b/tests/modules/snapaligner/paired/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SNAPALIGNER_INDEX } from '../../../../modules/snapaligner/index/main.nf' +include { SNAPALIGNER_PAIRED } from '../../../../modules/snapaligner/paired/main.nf' + +workflow test_snapaligner_paired { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true)] + ] + + SNAPALIGNER_INDEX ( file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true),[],[],[]) + SNAPALIGNER_PAIRED ( input, SNAPALIGNER_INDEX.out.index ) +} diff --git a/tests/modules/snapaligner/paired/nextflow.config b/tests/modules/snapaligner/paired/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/snapaligner/paired/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/snapaligner/paired/test.yml b/tests/modules/snapaligner/paired/test.yml new file mode 100644 index 00000000..7df1e02b --- /dev/null +++ b/tests/modules/snapaligner/paired/test.yml @@ -0,0 +1,9 @@ +- name: snapaligner paired test_snapaligner_paired + command: nextflow run tests/modules/snapaligner/paired -entry test_snapaligner_paired -c tests/config/nextflow.config + tags: + - snapaligner + - snapaligner/paired + files: + - path: output/snapaligner/test.bam + md5sum: 2ac92e9539fa246dd6db52b5de56fca5 + - path: output/snapaligner/versions.yml From b59713e6230cd5966d176cfd23288074612ede8f Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 12 Apr 2022 11:20:35 +0200 Subject: [PATCH 110/283] Tool/snap aligner single (#1510) * first commit * add tool * fix tests * fix indents * Update modules/snapaligner/single/meta.yml Co-authored-by: James A. Fellows Yates * fix comments * fix versions * prettier Co-authored-by: James A. 
Fellows Yates --- modules/snapaligner/single/main.nf | 41 ++++++++++++++++ modules/snapaligner/single/meta.yml | 48 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/snapaligner/single/main.nf | 17 +++++++ .../snapaligner/single/nextflow.config | 5 ++ tests/modules/snapaligner/single/test.yml | 9 ++++ 6 files changed, 124 insertions(+) create mode 100644 modules/snapaligner/single/main.nf create mode 100644 modules/snapaligner/single/meta.yml create mode 100644 tests/modules/snapaligner/single/main.nf create mode 100644 tests/modules/snapaligner/single/nextflow.config create mode 100644 tests/modules/snapaligner/single/test.yml diff --git a/modules/snapaligner/single/main.nf b/modules/snapaligner/single/main.nf new file mode 100644 index 00000000..b13e1153 --- /dev/null +++ b/modules/snapaligner/single/main.nf @@ -0,0 +1,41 @@ +process SNAPALIGNER_SINGLE { + tag '$meta.id' + label 'process_high' + + conda (params.enable_conda ? "bioconda::snap-aligner=2.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/snap-aligner:2.0.1--hd03093a_1': + 'quay.io/biocontainers/snap-aligner:2.0.1--hd03093a_1' }" + + input: + tuple val(meta), path(reads) + path index + + output: + tuple val(meta), path("*.bam"), emit: bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + + """ + mkdir -p index + mv $index index/ + + snap-aligner single \\ + index \\ + ${reads.join(" ")} \\ + -o -bam ${prefix}.bam \\ + -t ${task.cpus} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + snapaligner: \$(snap-aligner 2>&1| head -n 1 | sed 's/^.*version //;s/.\$//') + END_VERSIONS + """ +} diff --git a/modules/snapaligner/single/meta.yml b/modules/snapaligner/single/meta.yml new file mode 100644 index 00000000..e69cc721 --- /dev/null +++ b/modules/snapaligner/single/meta.yml @@ -0,0 +1,48 @@ +name: "snapaligner_single" +description: Performs single end fastq alignment to a fasta reference using SNAP +keywords: + - alignment + - map + - fastq + - bam + - sam +tools: + - "snapaligner": + description: "Scalable Nucleotide Alignment Program -- a fast and accurate read aligner for high-throughput sequencing data" + homepage: "http://snap.cs.berkeley.edu" + documentation: "https://1drv.ms/b/s!AhuEg_0yZD86hcpblUt-muHKYsG8fA?e=R8ogug" + tool_dev_url: "https://github.com/amplab/snap" + doi: "10.1101/2021.11.23.469039" + licence: "['Apache v2']" +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - reads: + type: file + description: List of single end input files + pattern: "*.{fastq.gz,fq.gz,fastq,fq,bam}" + - index: + type: file + description: List of SNAP genome index files + pattern: "{Genome,GenomeIndex,GenomeIndexHash,OverflowTable}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Aligned BAM file + pattern: "*.{bam}" + +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index b195968f..cd4913cf 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1679,6 +1679,10 @@ snapaligner/paired: - modules/snapaligner/paired/** - tests/modules/snapaligner/paired/** +snapaligner/single: + - modules/snapaligner/single/** + - tests/modules/snapaligner/single/** + snpdists: - modules/snpdists/** - tests/modules/snpdists/** diff --git a/tests/modules/snapaligner/single/main.nf b/tests/modules/snapaligner/single/main.nf new file mode 100644 index 00000000..616e517a --- /dev/null +++ b/tests/modules/snapaligner/single/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SNAPALIGNER_INDEX } from '../../../../modules/snapaligner/index/main.nf' +include { SNAPALIGNER_SINGLE } from '../../../../modules/snapaligner/single/main.nf' + +workflow test_snapaligner_single { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)] + ] + + SNAPALIGNER_INDEX ( file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true),[],[],[]) + SNAPALIGNER_SINGLE ( input, SNAPALIGNER_INDEX.out.index ) +} diff --git a/tests/modules/snapaligner/single/nextflow.config b/tests/modules/snapaligner/single/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/snapaligner/single/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/snapaligner/single/test.yml b/tests/modules/snapaligner/single/test.yml new file mode 100644 index 00000000..bbcbba1f --- /dev/null +++ b/tests/modules/snapaligner/single/test.yml @@ -0,0 +1,9 @@ +- name: snapaligner single test_snapaligner_single + command: nextflow run tests/modules/snapaligner/single -entry test_snapaligner_single -c tests/config/nextflow.config + tags: + - snapaligner/single + - snapaligner + files: + - path: output/snapaligner/test.bam + md5sum: 696f7ea8e1aa5f9d7dafb9d0134fe25d + - path: output/snapaligner/versions.yml From 409af2f27cbe45109acc7fee70718d2bf20aa449 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Tue, 12 Apr 2022 17:15:39 +0200 Subject: [PATCH 111/283] Improve syntax/logic coherence in all gatk4 plugins (#1459) * feat: code polishing * Apply suggestions from code review Co-authored-by: FriederikeHanssen * code polishing * more code polishing * code polishing * tests for applybqsrspark * fix typo * no need to check md5sum for versions.yml * fix: use correct syntax * code polishing again * add tests for markduplicatesspark * simplify mergevcfs tests * add tests for baserecalibratorspark * fix: path to entry * code polishing * fix linting * simplify module * update meta.yml * fix pair mode * fix: MITO mode * more tests * fix command * bad copy paste * fix typos * fix tests * fix test * update meta.yml * correct versions.yml in all test.yml * code polishing * code polishing * more code polishing * fix args * add tmpdir for all Co-authored-by: FriederikeHanssen --- modules/gatk4/applybqsr/main.nf | 15 +- modules/gatk4/applybqsr/meta.yml | 4 + modules/gatk4/applybqsrspark/main.nf | 51 +++++++ modules/gatk4/applybqsrspark/meta.yml | 72 ++++++++++ modules/gatk4/applyvqsr/main.nf | 23 ++-- modules/gatk4/applyvqsr/meta.yml | 8 +- modules/gatk4/baserecalibrator/main.nf | 33 +++-- modules/gatk4/baserecalibrator/meta.yml | 10 +- modules/gatk4/baserecalibratorspark/main.nf | 53 +++++++ modules/gatk4/baserecalibratorspark/meta.yml | 72 ++++++++++ modules/gatk4/bedtointervallist/main.nf | 10 +- modules/gatk4/calculatecontamination/main.nf | 11 +- modules/gatk4/calculatecontamination/meta.yml | 6 +- modules/gatk4/combinegvcfs/main.nf | 24 ++-- modules/gatk4/combinegvcfs/meta.yml | 32 +++-- .../gatk4/createsequencedictionary/main.nf | 9 +- .../gatk4/createsomaticpanelofnormals/main.nf | 17 +-- .../createsomaticpanelofnormals/meta.yml | 2 +- .../gatk4/estimatelibrarycomplexity/main.nf | 24 ++-- .../gatk4/estimatelibrarycomplexity/meta.yml | 3 +- modules/gatk4/fastqtosam/main.nf | 10 +- modules/gatk4/fastqtosam/meta.yml | 8 +- modules/gatk4/filtermutectcalls/main.nf | 39 +++--- modules/gatk4/filtermutectcalls/meta.yml | 13 +- modules/gatk4/gatherbqsrreports/main.nf | 15 +- modules/gatk4/gatherbqsrreports/meta.yml | 10 +- modules/gatk4/gatherpileupsummaries/main.nf | 17 +-- modules/gatk4/gatherpileupsummaries/meta.yml | 9 +- modules/gatk4/genomicsdbimport/main.nf | 37 ++--- modules/gatk4/genotypegvcfs/main.nf | 29 ++-- modules/gatk4/genotypegvcfs/meta.yml | 20 ++- modules/gatk4/getpileupsummaries/main.nf | 27 ++-- modules/gatk4/haplotypecaller/main.nf | 35 +++-- modules/gatk4/indexfeaturefile/main.nf | 9 +- modules/gatk4/intervallisttobed/main.nf | 5 +- modules/gatk4/intervallisttools/main.nf | 13 +- .../gatk4/learnreadorientationmodel/main.nf | 12 +- modules/gatk4/markduplicates/main.nf | 10 +- modules/gatk4/markduplicates/meta.yml | 1 + modules/gatk4/markduplicatesspark/main.nf | 50 +++++++ modules/gatk4/markduplicatesspark/meta.yml | 60 ++++++++ modules/gatk4/mergebamalignment/main.nf | 10 +- modules/gatk4/mergemutectstats/main.nf | 8 +- modules/gatk4/mergevcfs/main.nf | 20 ++- modules/gatk4/mutect2/main.nf | 44 ++---- modules/gatk4/mutect2/meta.yml | 16 --- modules/gatk4/revertsam/main.nf | 6 +- modules/gatk4/samtofastq/main.nf | 6 +- modules/gatk4/selectvariants/main.nf | 6 +- modules/gatk4/splitncigarreads/main.nf | 8 +- modules/gatk4/variantfiltration/main.nf | 10 +- modules/gatk4/variantrecalibrator/main.nf | 23 ++-- tests/modules/gatk4/applybqsr/test.yml | 3 - tests/modules/gatk4/applybqsrspark/main.nf | 47 +++++++ .../gatk4/applybqsrspark/nextflow.config | 5 + 
tests/modules/gatk4/applybqsrspark/test.yml | 29 ++++ tests/modules/gatk4/applyvqsr/test.yml | 2 - tests/modules/gatk4/baserecalibrator/test.yml | 4 + .../gatk4/baserecalibratorspark/main.nf | 69 ++++++++++ .../baserecalibratorspark/nextflow.config | 5 + .../gatk4/baserecalibratorspark/test.yml | 39 ++++++ .../modules/gatk4/bedtointervallist/test.yml | 1 + .../gatk4/calculatecontamination/main.nf | 15 +- .../calculatecontamination/nextflow.config | 4 + .../gatk4/calculatecontamination/test.yml | 3 - tests/modules/gatk4/combinegvcfs/test.yml | 1 - .../gatk4/createsequencedictionary/test.yml | 1 + .../createsomaticpanelofnormals/test.yml | 1 + .../gatk4/estimatelibrarycomplexity/test.yml | 1 + tests/modules/gatk4/fastqtosam/test.yml | 2 - .../modules/gatk4/filtermutectcalls/test.yml | 3 + .../modules/gatk4/gatherbqsrreports/test.yml | 2 - .../gatk4/gatherpileupsummaries/test.yml | 1 + tests/modules/gatk4/genomicsdbimport/test.yml | 9 -- tests/modules/gatk4/genotypegvcfs/main.nf | 129 +++++++++--------- tests/modules/gatk4/genotypegvcfs/test.yml | 9 ++ .../modules/gatk4/getpileupsummaries/test.yml | 3 - tests/modules/gatk4/haplotypecaller/test.yml | 3 + tests/modules/gatk4/indexfeaturefile/test.yml | 4 +- .../modules/gatk4/intervallisttobed/test.yml | 1 + .../modules/gatk4/intervallisttools/test.yml | 1 + .../gatk4/learnreadorientationmodel/test.yml | 1 + .../gatk4/markduplicates/nextflow.config | 4 + tests/modules/gatk4/markduplicates/test.yml | 2 - .../modules/gatk4/markduplicatesspark/main.nf | 28 ++++ .../gatk4/markduplicatesspark/nextflow.config | 5 + .../gatk4/markduplicatesspark/test.yml | 25 ++++ .../modules/gatk4/mergebamalignment/test.yml | 1 + tests/modules/gatk4/mergemutectstats/test.yml | 1 + tests/modules/gatk4/mergevcfs/main.nf | 24 ++-- tests/modules/gatk4/mergevcfs/test.yml | 6 +- tests/modules/gatk4/mutect2/main.nf | 81 +++++------ tests/modules/gatk4/mutect2/nextflow.config | 12 ++ tests/modules/gatk4/mutect2/test.yml | 16 +++ tests/modules/gatk4/revertsam/test.yml | 1 + tests/modules/gatk4/samtofastq/test.yml | 2 + tests/modules/gatk4/selectvariants/test.yml | 2 - tests/modules/gatk4/splitncigarreads/test.yml | 1 - .../modules/gatk4/variantfiltration/test.yml | 2 + .../modules/gatk4/variantrecalibrator/main.nf | 88 ++++++------ .../gatk4/variantrecalibrator/nextflow.config | 1 + .../gatk4/variantrecalibrator/test.yml | 2 + 102 files changed, 1209 insertions(+), 558 deletions(-) create mode 100644 modules/gatk4/applybqsrspark/main.nf create mode 100644 modules/gatk4/applybqsrspark/meta.yml create mode 100644 modules/gatk4/baserecalibratorspark/main.nf create mode 100644 modules/gatk4/baserecalibratorspark/meta.yml create mode 100644 modules/gatk4/markduplicatesspark/main.nf create mode 100644 modules/gatk4/markduplicatesspark/meta.yml create mode 100644 tests/modules/gatk4/applybqsrspark/main.nf create mode 100644 tests/modules/gatk4/applybqsrspark/nextflow.config create mode 100644 tests/modules/gatk4/applybqsrspark/test.yml create mode 100644 tests/modules/gatk4/baserecalibratorspark/main.nf create mode 100644 tests/modules/gatk4/baserecalibratorspark/nextflow.config create mode 100644 tests/modules/gatk4/baserecalibratorspark/test.yml create mode 100644 tests/modules/gatk4/markduplicatesspark/main.nf create mode 100644 tests/modules/gatk4/markduplicatesspark/nextflow.config create mode 100644 tests/modules/gatk4/markduplicatesspark/test.yml diff --git a/modules/gatk4/applybqsr/main.nf b/modules/gatk4/applybqsr/main.nf index 851afc04..7a64dab2 100644 --- 
a/modules/gatk4/applybqsr/main.nf +++ b/modules/gatk4/applybqsr/main.nf @@ -14,9 +14,9 @@ process GATK4_APPLYBQSR { path dict output: - tuple val(meta), path("*.bam"), emit: bam, optional: true + tuple val(meta), path("*.bam") , emit: bam, optional: true tuple val(meta), path("*.cram"), emit: cram, optional: true - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -24,8 +24,7 @@ process GATK4_APPLYBQSR { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def interval = intervals ? "-L ${intervals}" : "" - def file_type = input.getExtension() + def interval_command = intervals ? "--intervals $intervals" : "" def avail_mem = 3 if (!task.memory) { @@ -35,12 +34,12 @@ process GATK4_APPLYBQSR { } """ gatk --java-options "-Xmx${avail_mem}g" ApplyBQSR \\ - -R $fasta \\ - -I $input \\ + --input $input \\ + --output ${prefix}.${input.getExtension()} \\ + --reference $fasta \\ --bqsr-recal-file $bqsr_table \\ - $interval \\ + $interval_command \\ --tmp-dir . \\ - -O ${prefix}.${file_type} \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/applybqsr/meta.yml b/modules/gatk4/applybqsr/meta.yml index 82d3cbf3..3fc93f10 100644 --- a/modules/gatk4/applybqsr/meta.yml +++ b/modules/gatk4/applybqsr/meta.yml @@ -61,6 +61,10 @@ output: type: file description: Recalibrated BAM file pattern: "*.{bam}" + - cram: + type: file + description: Recalibrated CRAM file + pattern: "*.{cram}" authors: - "@yocra3" diff --git a/modules/gatk4/applybqsrspark/main.nf b/modules/gatk4/applybqsrspark/main.nf new file mode 100644 index 00000000..04303c09 --- /dev/null +++ b/modules/gatk4/applybqsrspark/main.nf @@ -0,0 +1,51 @@ +process GATK4_APPLYBQSR_SPARK { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'quay.io/biocontainers/gatk4:4.2.3.0--hdfd78af_0' }" + + input: + tuple val(meta), path(input), path(input_index), path(bqsr_table), path(intervals) + path fasta + path fai + path dict + + output: + tuple val(meta), path("*.bam") , emit: bam, optional: true + tuple val(meta), path("*.cram"), emit: cram, optional: true + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def interval_command = intervals ? "--intervals $intervals" : "" + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK ApplyBQSRSpark] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" ApplyBQSRSpark \\ + --input $input \\ + --output ${prefix}.${input.getExtension()} \\ + --reference $fasta \\ + --bqsr-recal-file $bqsr_table \\ + $interval_command \\ + --spark-master local[${task.cpus}] \\ + --tmp-dir . 
\\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/applybqsrspark/meta.yml b/modules/gatk4/applybqsrspark/meta.yml new file mode 100644 index 00000000..070b37ab --- /dev/null +++ b/modules/gatk4/applybqsrspark/meta.yml @@ -0,0 +1,72 @@ +name: gatk4_applybqsr_spark +description: Apply base quality score recalibration (BQSR) to a bam file +keywords: + - bqsr + - bam +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + licence: ["Apache-2.0"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: + type: file + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" + - bqsr_table: + type: file + description: Recalibration table from gatk4_baserecalibrator + - intervals: + type: file + description: Bed file with the genomic regions included in the library (optional) + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Recalibrated BAM file + pattern: "*.{bam}" + - cram: + type: file + description: Recalibrated CRAM file + pattern: "*.{cram}" + +authors: + - "@yocra3" + - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/applyvqsr/main.nf b/modules/gatk4/applyvqsr/main.nf index 3049aa79..8b235809 100644 --- a/modules/gatk4/applyvqsr/main.nf +++ b/modules/gatk4/applyvqsr/main.nf @@ -8,15 +8,15 @@ process GATK4_APPLYVQSR { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf), path(tbi), path(recal), path(recalidx), path(tranches) - path fasta - path fai - path dict + tuple val(meta), path(vcf), path(vcf_tbi), path(recal), path(recal_index), path(tranches) + path fasta + path fai + path dict output: - tuple val(meta), path("*.vcf.gz") , emit: vcf - tuple val(meta), path("*.tbi") , emit: tbi - path "versions.yml" , emit: versions + tuple val(meta), path("*.vcf.gz"), emit: vcf + tuple val(meta), path("*.tbi") , emit: tbi + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -24,7 +24,7 @@ process GATK4_APPLYVQSR { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - refCommand = fasta ? "-R ${fasta} " : '' + def reference_command = fasta ? 
"--reference $fasta" : '' def avail_mem = 3 if (!task.memory) { @@ -34,11 +34,12 @@ process GATK4_APPLYVQSR { } """ gatk --java-options "-Xmx${avail_mem}g" ApplyVQSR \\ - ${refCommand} \\ - -V ${vcf} \\ - -O ${prefix}.vcf.gz \\ + --variant ${vcf} \\ + --output ${prefix}.vcf.gz \\ + $reference_command \\ --tranches-file $tranches \\ --recal-file $recal \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/applyvqsr/meta.yml b/modules/gatk4/applyvqsr/meta.yml index 4a99db45..a05813d1 100644 --- a/modules/gatk4/applyvqsr/meta.yml +++ b/modules/gatk4/applyvqsr/meta.yml @@ -29,20 +29,20 @@ input: type: file description: VCF file to be recalibrated, this should be the same file as used for the first stage VariantRecalibrator. pattern: "*.vcf" - - tbi: + - vcf_tbi: type: file - description: Tbi index for the input vcf file. + description: tabix index for the input vcf file. pattern: "*.vcf.tbi" - recal: type: file description: Recalibration file produced when the input vcf was run through VariantRecalibrator in stage 1. pattern: "*.recal" - - recalidx: + - recal_index: type: file description: Index file for the recalibration file. pattern: ".recal.idx" - tranches: - type: boolean + type: file description: Tranches file produced when the input vcf was run through VariantRecalibrator in stage 1. pattern: ".tranches" - fasta: diff --git a/modules/gatk4/baserecalibrator/main.nf b/modules/gatk4/baserecalibrator/main.nf index ecb41d9b..766a8338 100644 --- a/modules/gatk4/baserecalibrator/main.nf +++ b/modules/gatk4/baserecalibrator/main.nf @@ -9,15 +9,15 @@ process GATK4_BASERECALIBRATOR { input: tuple val(meta), path(input), path(input_index), path(intervals) - path fasta - path fai - path dict - path knownSites - path knownSites_tbi + path fasta + path fai + path dict + path known_sites + path known_sites_tbi output: tuple val(meta), path("*.table"), emit: table - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -25,8 +25,8 @@ process GATK4_BASERECALIBRATOR { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def intervalsCommand = intervals ? "-L ${intervals}" : "" - def sitesCommand = knownSites.collect{"--known-sites ${it}"}.join(' ') + def interval_command = intervals ? "--intervals $intervals" : "" + def sites_command = known_sites.collect{"--known-sites $it"}.join(' ') def avail_mem = 3 if (!task.memory) { @@ -34,16 +34,15 @@ process GATK4_BASERECALIBRATOR { } else { avail_mem = task.memory.giga } - """ - gatk --java-options "-Xmx${avail_mem}g" BaseRecalibrator \ - -R $fasta \ - -I $input \ - $sitesCommand \ - $intervalsCommand \ - --tmp-dir . \ - $args \ - -O ${prefix}.table + gatk --java-options "-Xmx${avail_mem}g" BaseRecalibrator \\ + --input $input \\ + --output ${prefix}.table \\ + --reference $fasta \\ + $interval_command \\ + $sites_command \\ + --tmp-dir . 
\\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/baserecalibrator/meta.yml b/modules/gatk4/baserecalibrator/meta.yml index 2e52b8ab..08c1ebbf 100644 --- a/modules/gatk4/baserecalibrator/meta.yml +++ b/modules/gatk4/baserecalibrator/meta.yml @@ -42,9 +42,14 @@ input: type: file description: GATK sequence dictionary pattern: "*.dict" - - knownSites: + - known_sites: type: file - description: Bed file with the genomic regions included in the library (optional) + description: VCF files with known sites for indels / snps (optional) + pattern: "*.vcf.gz" + - known_sites_tbi: + type: file + description: Tabix index of the known_sites (optional) + pattern: "*.vcf.gz.tbi" output: - meta: @@ -64,3 +69,4 @@ output: authors: - "@yocra3" - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/baserecalibratorspark/main.nf b/modules/gatk4/baserecalibratorspark/main.nf new file mode 100644 index 00000000..70c70181 --- /dev/null +++ b/modules/gatk4/baserecalibratorspark/main.nf @@ -0,0 +1,53 @@ +process GATK4_BASERECALIBRATOR_SPARK { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'broadinstitute/gatk:4.2.3.0' }" + + input: + tuple val(meta), path(input), path(input_index), path(intervals) + path fasta + path fai + path dict + path known_sites + path known_sites_tbi + + output: + tuple val(meta), path("*.table"), emit: table + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def interval_command = intervals ? "--intervals $intervals" : "" + def sites_command = known_sites.collect{"--known-sites $it"}.join(' ') + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK BaseRecalibratorSpark] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + gatk --java-options "-Xmx${avail_mem}g" BaseRecalibratorSpark \\ + --input $input \\ + --output ${prefix}.table \\ + --reference $fasta \\ + $interval_command \\ + $sites_command \\ + --spark-master local[${task.cpus}] \\ + --tmp-dir . \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/baserecalibratorspark/meta.yml b/modules/gatk4/baserecalibratorspark/meta.yml new file mode 100644 index 00000000..581c48ef --- /dev/null +++ b/modules/gatk4/baserecalibratorspark/meta.yml @@ -0,0 +1,72 @@ +name: gatk4_baserecalibrator_spark +description: Generate recalibration table for Base Quality Score Recalibration (BQSR) +keywords: + - sort +tools: + - gatk4: + description: | + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. 
+ homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/categories/360002369672s + doi: 10.1158/1538-7445.AM2017-3590 + licence: ["Apache-2.0"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: BAM/CRAM file from alignment + pattern: "*.{bam,cram}" + - input_index: + type: file + description: BAI/CRAI file from alignment + pattern: "*.{bai,crai}" + - intervals: + type: file + description: Bed file with the genomic regions included in the library (optional) + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + - known_sites: + type: file + description: VCF files with known sites for indels / snps (optional) + pattern: "*.vcf.gz" + - known_sites_tbi: + type: file + description: Tabix index of the known_sites (optional) + pattern: "*.vcf.gz.tbi" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - table: + type: file + description: Recalibration table from BaseRecalibrator + pattern: "*.{table}" + +authors: + - "@yocra3" + - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/bedtointervallist/main.nf b/modules/gatk4/bedtointervallist/main.nf index c3b624a8..118f535b 100644 --- a/modules/gatk4/bedtointervallist/main.nf +++ b/modules/gatk4/bedtointervallist/main.nf @@ -9,7 +9,7 @@ process GATK4_BEDTOINTERVALLIST { input: tuple val(meta), path(bed) - path sequence_dict + path dict output: tuple val(meta), path('*.interval_list'), emit: interval_list @@ -21,6 +21,7 @@ process GATK4_BEDTOINTERVALLIST { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK BedToIntervalList] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -29,9 +30,10 @@ process GATK4_BEDTOINTERVALLIST { } """ gatk --java-options "-Xmx${avail_mem}g" BedToIntervalList \\ - -I $bed \\ - -SD $sequence_dict \\ - -O ${prefix}.interval_list \\ + --INPUT $bed \\ + --OUTPUT ${prefix}.interval_list \\ + --SEQUENCE_DICTIONARY $dict \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/calculatecontamination/main.nf b/modules/gatk4/calculatecontamination/main.nf index 298739ab..197fe6c2 100644 --- a/modules/gatk4/calculatecontamination/main.nf +++ b/modules/gatk4/calculatecontamination/main.nf @@ -9,7 +9,6 @@ process GATK4_CALCULATECONTAMINATION { input: tuple val(meta), path(pileup), path(matched) - val segmentout output: tuple val(meta), path('*.contamination.table'), emit: contamination @@ -22,8 +21,8 @@ process GATK4_CALCULATECONTAMINATION { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def matched_command = matched ? " -matched ${matched} " : '' - def segment_command = segmentout ? " -segments ${prefix}.segmentation.table" : '' + def matched_command = matched ? "--matched-normal $matched" : '' + def avail_mem = 3 if (!task.memory) { log.info '[GATK CalculateContamination] Available memory not known - defaulting to 3GB. 
Specify process memory requirements to change this.' @@ -32,10 +31,10 @@ process GATK4_CALCULATECONTAMINATION { } """ gatk --java-options "-Xmx${avail_mem}g" CalculateContamination \\ - -I $pileup \\ + --input $pileup \\ + --output ${prefix}.contamination.table \\ $matched_command \\ - -O ${prefix}.contamination.table \\ - $segment_command \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/calculatecontamination/meta.yml b/modules/gatk4/calculatecontamination/meta.yml index e5e870dc..7767bd08 100644 --- a/modules/gatk4/calculatecontamination/meta.yml +++ b/modules/gatk4/calculatecontamination/meta.yml @@ -32,9 +32,6 @@ input: type: file description: File containing the pileups summary table of a normal sample that matches with the tumor sample specified in pileup argument. This is an optional input. pattern: "*.pileups.table" - - segmentout: - type: boolean - description: specifies whether to output the segmentation table. output: - contamination: @@ -43,7 +40,7 @@ output: pattern: "*.contamination.table" - segmentation: type: file - description: optional output table containing segmentation of tumor minor allele fractions. + description: output table containing segmentation of tumor minor allele fractions (optional) pattern: "*.segmentation.table" - versions: type: file @@ -52,3 +49,4 @@ output: authors: - "@GCJMackenzie" + - "@maxulysse" diff --git a/modules/gatk4/combinegvcfs/main.nf b/modules/gatk4/combinegvcfs/main.nf index c0a7ac45..45bf4372 100644 --- a/modules/gatk4/combinegvcfs/main.nf +++ b/modules/gatk4/combinegvcfs/main.nf @@ -9,9 +9,9 @@ process GATK4_COMBINEGVCFS { input: tuple val(meta), path(vcf), path(vcf_idx) - path (fasta) - path (fasta_fai) - path (fasta_dict) + path fasta + path fai + path dict output: tuple val(meta), path("*.combined.g.vcf.gz"), emit: combined_gvcf @@ -23,21 +23,21 @@ process GATK4_COMBINEGVCFS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def avail_mem = 3 + def input_list = vcf.collect{"--variant $it"}.join(' ') + + def avail_mem = 3 if (!task.memory) { log.info '[GATK COMBINEGVCFS] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } - def input_files = vcf.collect{"-V ${it}"}.join(' ') // add '-V' to each vcf file """ - gatk \\ - --java-options "-Xmx${avail_mem}g" \\ - CombineGVCFs \\ - -R ${fasta} \\ - -O ${prefix}.combined.g.vcf.gz \\ - ${args} \\ - ${input_files} + gatk --java-options "-Xmx${avail_mem}g" CombineGVCFs \\ + $input_list \\ + --output ${prefix}.combined.g.vcf.gz \\ + --reference ${fasta} \\ + --tmp-dir . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/combinegvcfs/meta.yml b/modules/gatk4/combinegvcfs/meta.yml index b891de90..9330e084 100644 --- a/modules/gatk4/combinegvcfs/meta.yml +++ b/modules/gatk4/combinegvcfs/meta.yml @@ -19,18 +19,11 @@ tools: licence: ["Apache-2.0"] input: - - fasta: - type: file - description: The reference fasta file - pattern: "*.fasta" - - fai: - type: file - description: FASTA index file - pattern: "*.{fai}" - - dict: - type: file - description: FASTA dictionary file - pattern: "*.{dict}" + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] - vcf: type: file description: Compressed VCF files @@ -38,7 +31,19 @@ input: - vcf_idx: type: file description: VCF Index file - pattern: "*.{fai}" + pattern: "*.vcf.gz.idx" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: FASTA index file + pattern: "*.fasta.fai" + - dict: + type: file + description: FASTA dictionary file + pattern: "*.dict" output: - gvcf: type: file @@ -53,3 +58,4 @@ authors: - "@sateeshperi" - "@mjcipriano" - "@hseabolt" + - "@maxulysse" diff --git a/modules/gatk4/createsequencedictionary/main.nf b/modules/gatk4/createsequencedictionary/main.nf index dea77a1d..dbf37048 100644 --- a/modules/gatk4/createsequencedictionary/main.nf +++ b/modules/gatk4/createsequencedictionary/main.nf @@ -11,14 +11,15 @@ process GATK4_CREATESEQUENCEDICTIONARY { path fasta output: - path "*.dict" , emit: dict - path "versions.yml" , emit: versions + path "*.dict" , emit: dict + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: def args = task.ext.args ?: '' + def avail_mem = 6 if (!task.memory) { log.info '[GATK CreateSequenceDictionary] Available memory not known - defaulting to 6GB. Specify process memory requirements to change this.' @@ -26,10 +27,10 @@ process GATK4_CREATESEQUENCEDICTIONARY { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - CreateSequenceDictionary \\ + gatk --java-options "-Xmx${avail_mem}g" CreateSequenceDictionary \\ --REFERENCE $fasta \\ --URI $fasta \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/createsomaticpanelofnormals/main.nf b/modules/gatk4/createsomaticpanelofnormals/main.nf index c030f4e3..3df29947 100644 --- a/modules/gatk4/createsomaticpanelofnormals/main.nf +++ b/modules/gatk4/createsomaticpanelofnormals/main.nf @@ -9,9 +9,9 @@ process GATK4_CREATESOMATICPANELOFNORMALS { input: tuple val(meta), path(genomicsdb) - path fasta - path fai - path dict + path fasta + path fai + path dict output: tuple val(meta), path("*.vcf.gz"), emit: vcf @@ -24,6 +24,7 @@ process GATK4_CREATESOMATICPANELOFNORMALS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK CreateSomaticPanelOfNormals] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -31,11 +32,11 @@ process GATK4_CREATESOMATICPANELOFNORMALS { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - CreateSomaticPanelOfNormals \\ - -R $fasta \\ - -V gendb://$genomicsdb \\ - -O ${prefix}.vcf.gz \\ + gatk --java-options "-Xmx${avail_mem}g" CreateSomaticPanelOfNormals \\ + --variant gendb://$genomicsdb \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + --tmp-dir . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/createsomaticpanelofnormals/meta.yml b/modules/gatk4/createsomaticpanelofnormals/meta.yml index e450c68a..43e675fb 100644 --- a/modules/gatk4/createsomaticpanelofnormals/meta.yml +++ b/modules/gatk4/createsomaticpanelofnormals/meta.yml @@ -44,7 +44,7 @@ output: pattern: "*.vcf.gz" - tbi: type: file - description: Index of vcf file + description: Tabix index of vcf file pattern: "*vcf.gz.tbi" - versions: type: file diff --git a/modules/gatk4/estimatelibrarycomplexity/main.nf b/modules/gatk4/estimatelibrarycomplexity/main.nf index ba68bf70..caa34630 100644 --- a/modules/gatk4/estimatelibrarycomplexity/main.nf +++ b/modules/gatk4/estimatelibrarycomplexity/main.nf @@ -8,14 +8,14 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(cram) - path(fasta) - path(fai) - path(dict) + tuple val(meta), path(input) + path fasta + path fai + path dict output: tuple val(meta), path('*.metrics'), emit: metrics - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -23,7 +23,7 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def crams = cram.collect(){ x -> "-I ".concat(x.toString()) }.join(" ") + def input_list = input.collect(){"--INPUT $it"}.join(" ") def avail_mem = 3 if (!task.memory) { @@ -32,12 +32,12 @@ process GATK4_ESTIMATELIBRARYCOMPLEXITY { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" EstimateLibraryComplexity \ - ${crams} \ - -O ${prefix}.metrics \ - --REFERENCE_SEQUENCE ${fasta} \ - --VALIDATION_STRINGENCY SILENT \ - --TMP_DIR . $args + gatk --java-options "-Xmx${avail_mem}g" EstimateLibraryComplexity \\ + $input_list \\ + --OUTPUT ${prefix}.metrics \\ + --REFERENCE_SEQUENCE ${fasta} \\ + --TMP_DIR . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/estimatelibrarycomplexity/meta.yml b/modules/gatk4/estimatelibrarycomplexity/meta.yml index 9f2dee60..72a679e9 100644 --- a/modules/gatk4/estimatelibrarycomplexity/meta.yml +++ b/modules/gatk4/estimatelibrarycomplexity/meta.yml @@ -20,7 +20,7 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - cram: + - input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" @@ -54,3 +54,4 @@ output: authors: - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/fastqtosam/main.nf b/modules/gatk4/fastqtosam/main.nf index 0c85a74f..199058d0 100644 --- a/modules/gatk4/fastqtosam/main.nf +++ b/modules/gatk4/fastqtosam/main.nf @@ -20,7 +20,8 @@ process GATK4_FASTQTOSAM { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def read_files = meta.single_end ? "-F1 $reads" : "-F1 ${reads[0]} -F2 ${reads[1]}" + def reads_command = meta.single_end ? "--FASTQ $reads" : "--FASTQ ${reads[0]} --FASTQ2 ${reads[1]}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK FastqToSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -29,9 +30,10 @@ process GATK4_FASTQTOSAM { } """ gatk --java-options "-Xmx${avail_mem}g" FastqToSam \\ - $read_files \\ - -O ${prefix}.bam \\ - -SM $prefix \\ + $reads_command \\ + --OUTPUT ${prefix}.bam \\ + --SAMPLE_NAME $prefix \\ + --TMP_DIR . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/fastqtosam/meta.yml b/modules/gatk4/fastqtosam/meta.yml index 59e305b8..6e5bf1cd 100644 --- a/modules/gatk4/fastqtosam/meta.yml +++ b/modules/gatk4/fastqtosam/meta.yml @@ -34,14 +34,14 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - versions: - type: file - description: File containing software versions - pattern: "versions.yml" - bam: type: file description: Converted BAM file pattern: "*.bam" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" authors: - "@ntoda03" diff --git a/modules/gatk4/filtermutectcalls/main.nf b/modules/gatk4/filtermutectcalls/main.nf index 77175c7d..c1c82e0b 100644 --- a/modules/gatk4/filtermutectcalls/main.nf +++ b/modules/gatk4/filtermutectcalls/main.nf @@ -8,10 +8,10 @@ process GATK4_FILTERMUTECTCALLS { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf), path(tbi), path(stats), path(orientationbias), path(segmentation), path(contaminationfile), val(contaminationest) - path fasta - path fai - path dict + tuple val(meta), path(vcf), path(vcf_tbi), path(stats), path(orientationbias), path(segmentation), path(table), val(estimate) + path fasta + path fai + path dict output: tuple val(meta), path("*.vcf.gz") , emit: vcf @@ -26,20 +26,11 @@ process GATK4_FILTERMUTECTCALLS { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def orientationbias_options = '' - if (orientationbias) { - orientationbias_options = '--orientation-bias-artifact-priors ' + orientationbias.join(' --orientation-bias-artifact-priors ') - } + def orientationbias_command = orientationbias ? orientationbias.collect{"--orientation-bias-artifact-priors $it"}.join(' ') : '' + def segmentation_command = segmentation ? segmentation.collect{"--tumor-segmentation $it"}.join(' ') : '' + def estimate_command = estimate ? " --contamination-estimate ${estimate} " : '' + def table_command = table ? " --contamination-table ${table} " : '' - def segmentation_options = '' - if (segmentation) { - segmentation_options = '--tumor-segmentation ' + segmentation.join(' --tumor-segmentation ') - } - - def contamination_options = contaminationest ? " --contamination-estimate ${contaminationest} " : '' - if (contaminationfile) { - contamination_options = '--contamination-table ' + contaminationfile.join(' --contamination-table ') - } def avail_mem = 3 if (!task.memory) { log.info '[GATK FilterMutectCalls] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -48,12 +39,14 @@ process GATK4_FILTERMUTECTCALLS { } """ gatk --java-options "-Xmx${avail_mem}g" FilterMutectCalls \\ - -R $fasta \\ - -V $vcf \\ - $orientationbias_options \\ - $segmentation_options \\ - $contamination_options \\ - -O ${prefix}.vcf.gz \\ + --variant $vcf \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + $orientationbias_command \\ + $segmentation_command \\ + $estimate_command \\ + $table_command \\ + --tmp-dir . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/filtermutectcalls/meta.yml b/modules/gatk4/filtermutectcalls/meta.yml index 5182c89f..d1972d70 100644 --- a/modules/gatk4/filtermutectcalls/meta.yml +++ b/modules/gatk4/filtermutectcalls/meta.yml @@ -26,9 +26,9 @@ input: type: file description: compressed vcf file of mutect2calls pattern: "*.vcf.gz" - - tbi: + - vcf_tbi: type: file - description: Index of vcf file + description: Tabix index of vcf file pattern: "*vcf.gz.tbi" - stats: type: file @@ -42,13 +42,13 @@ input: type: list description: tables containing segmentation information for input vcf. Optional input. pattern: "*.segmentation.table" - - contaminationfile: + - table: type: list - description: table(s) containing contamination contamination data for input vcf. Optional input, takes priority over contaminationest. + description: table(s) containing contamination data for input vcf. Optional input, takes priority over estimate. pattern: "*.contamination.table" - - contaminationest: + - estimate: type: val - description: estimation of contamination value as a double. Optional input, will only be used if contaminationfile is not specified. + description: estimation of contamination value as a double. Optional input, will only be used if table is not specified. - fasta: type: file description: The reference fasta file @@ -82,3 +82,4 @@ output: authors: - "@GCJMackenzie" + - "@maxulysse" diff --git a/modules/gatk4/gatherbqsrreports/main.nf b/modules/gatk4/gatherbqsrreports/main.nf index 279f1ac8..1f5f2e1b 100644 --- a/modules/gatk4/gatherbqsrreports/main.nf +++ b/modules/gatk4/gatherbqsrreports/main.nf @@ -8,7 +8,7 @@ process GATK4_GATHERBQSRREPORTS { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(recal_table) + tuple val(meta), path(table) output: tuple val(meta), path("*.table"), emit: table @@ -20,7 +20,7 @@ process GATK4_GATHERBQSRREPORTS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def input = recal_table.collect{"-I ${it}"}.join(' ') + def input_list = table.collect{"--input $it"}.join(' ') def avail_mem = 3 if (!task.memory) { @@ -29,12 +29,11 @@ process GATK4_GATHERBQSRREPORTS { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - GatherBQSRReports \ - ${input} \ - --tmp-dir . \ - $args \ - --output ${prefix}.table + gatk --java-options "-Xmx${avail_mem}g" GatherBQSRReports \\ + $input_list \\ + --output ${prefix}.table \\ + --tmp-dir . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/gatherbqsrreports/meta.yml b/modules/gatk4/gatherbqsrreports/meta.yml index 62d008d2..99e74951 100644 --- a/modules/gatk4/gatherbqsrreports/meta.yml +++ b/modules/gatk4/gatherbqsrreports/meta.yml @@ -19,7 +19,7 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - recal_table: + - table: type: file description: File(s) containing BQSR table(s) pattern: "*.table" @@ -30,14 +30,14 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] + - table: + type: file + description: File containing joined BQSR table + pattern: "*.table" - versions: type: file description: File containing software versions pattern: "versions.yml" - - recal_table: - type: file - description: File containing joined BQSR table - pattern: "*.table" authors: - "@FriederikeHanssen" diff --git a/modules/gatk4/gatherpileupsummaries/main.nf b/modules/gatk4/gatherpileupsummaries/main.nf index 52e57127..f5e9cf22 100644 --- a/modules/gatk4/gatherpileupsummaries/main.nf +++ b/modules/gatk4/gatherpileupsummaries/main.nf @@ -10,11 +10,11 @@ process GATK4_GATHERPILEUPSUMMARIES { input: tuple val(meta), path(pileup) - path dict + path dict output: tuple val(meta), path("*.pileupsummaries.table"), emit: table - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -22,7 +22,7 @@ process GATK4_GATHERPILEUPSUMMARIES { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def input = pileup.collect{ "-I ${it} " }.join(' ') + def input_list = pileup.collect{ "--I $it" }.join(' ') def avail_mem = 3 if (!task.memory) { @@ -31,11 +31,12 @@ process GATK4_GATHERPILEUPSUMMARIES { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \ - GatherPileupSummaries \ - --sequence-dictionary ${dict} \ - ${input} \ - -O ${prefix}.pileupsummaries.table + gatk --java-options "-Xmx${avail_mem}g" GatherPileupSummaries \\ + $input_list \\ + --O ${prefix}.pileupsummaries.table \\ + --sequence-dictionary $dict \\ + --tmp-dir . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/gatherpileupsummaries/meta.yml b/modules/gatk4/gatherpileupsummaries/meta.yml index 2dc92d55..823ea365 100644 --- a/modules/gatk4/gatherpileupsummaries/meta.yml +++ b/modules/gatk4/gatherpileupsummaries/meta.yml @@ -28,14 +28,15 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] + - table: + type: file + description: pileup summaries table file + pattern: "*.pileupsummaries.table" - versions: type: file description: File containing software versions pattern: "versions.yml" - - table: - type: file - description: Pileup file - pattern: "*.pileups.table" authors: - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/genomicsdbimport/main.nf b/modules/gatk4/genomicsdbimport/main.nf index d2d89ccc..d2b78899 100644 --- a/modules/gatk4/genomicsdbimport/main.nf +++ b/modules/gatk4/genomicsdbimport/main.nf @@ -8,13 +8,13 @@ process GATK4_GENOMICSDBIMPORT { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf), path(tbi), path(intervalfile), val(intervalval), path(wspace) - val run_intlist - val run_updatewspace - val input_map + tuple val(meta), path(vcf), path(tbi), path(interval_file), val(interval_value), path(wspace) + val run_intlist + val run_updatewspace + val input_map output: - tuple val(meta), path("${prefix}") , optional:true, emit: genomicsdb + tuple val(meta), path("$prefix") , optional:true, emit: genomicsdb tuple val(meta), path("$updated_db") , optional:true, emit: updatedb tuple val(meta), path("*.interval_list"), optional:true, emit: intervallist path "versions.yml" , emit: versions @@ -27,22 +27,22 @@ process GATK4_GENOMICSDBIMPORT { prefix = task.ext.prefix ?: "${meta.id}" // settings for running default create gendb mode - inputs_command = input_map ? 
"--sample-name-map ${vcf[0]}" : "${'-V ' + vcf.join(' -V ')}" - dir_command = "--genomicsdb-workspace-path ${prefix}" - intervals_command = intervalfile ? " -L ${intervalfile} " : " -L ${intervalval} " + input_command = input_map ? "--sample-name-map ${vcf[0]}" : vcf.collect(){"--variant $it"}.join(' ') + + genomicsdb_command = "--genomicsdb-workspace-path ${prefix}" + interval_command = interval_file ? "--intervals ${interval_file}" : "--intervals ${interval_value}" // settings changed for running get intervals list mode if run_intlist is true if (run_intlist) { - inputs_command = '' - dir_command = "--genomicsdb-update-workspace-path ${wspace}" - intervals_command = "--output-interval-list-to-file ${prefix}.interval_list" + genomicsdb_command = "--genomicsdb-update-workspace-path ${wspace}" + interval_command = "--output-interval-list-to-file ${prefix}.interval_list" } - // settings changed for running update gendb mode. inputs_command same as default, update_db forces module to emit the updated gendb + // settings changed for running update gendb mode. input_command same as default, update_db forces module to emit the updated gendb if (run_updatewspace) { - dir_command = "--genomicsdb-update-workspace-path ${wspace}" - intervals_command = '' - updated_db = wspace.toString() + genomicsdb_command = "--genomicsdb-update-workspace-path ${wspace}" + interval_command = '' + updated_db = "${wspace}" } def avail_mem = 3 @@ -53,9 +53,10 @@ process GATK4_GENOMICSDBIMPORT { } """ gatk --java-options "-Xmx${avail_mem}g" GenomicsDBImport \\ - $inputs_command \\ - $dir_command \\ - $intervals_command \\ + $input_command \\ + $genomicsdb_command \\ + $interval_command \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/genotypegvcfs/main.nf b/modules/gatk4/genotypegvcfs/main.nf index 4a42ad0a..0df88d66 100644 --- a/modules/gatk4/genotypegvcfs/main.nf +++ b/modules/gatk4/genotypegvcfs/main.nf @@ -10,10 +10,10 @@ process GATK4_GENOTYPEGVCFS { input: tuple val(meta), path(gvcf), path(gvcf_index), path(intervals), path(intervals_index) path fasta - path fasta_index - path fasta_dict + path fai + path dict path dbsnp - path dbsnp_index + path dbsnp_tbi output: tuple val(meta), path("*.vcf.gz"), emit: vcf @@ -26,9 +26,10 @@ process GATK4_GENOTYPEGVCFS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def dbsnp_options = dbsnp ? "-D ${dbsnp}" : "" - def interval_options = intervals ? "-L ${intervals}" : "" - def gvcf_options = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + def gvcf_command = gvcf.name.endsWith(".vcf") || gvcf.name.endsWith(".vcf.gz") ? "$gvcf" : "gendb://$gvcf" + def dbsnp_command = dbsnp ? "--dbsnp $dbsnp" : "" + def interval_command = intervals ? "--intervals $intervals" : "" + def avail_mem = 3 if (!task.memory) { log.info '[GATK GenotypeGVCFs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -36,14 +37,14 @@ process GATK4_GENOTYPEGVCFS { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - GenotypeGVCFs \\ - $args \\ - $interval_options \\ - $dbsnp_options \\ - -R $fasta \\ - -V $gvcf_options \\ - -O ${prefix}.vcf.gz + gatk --java-options "-Xmx${avail_mem}g" GenotypeGVCFs \\ + --variant $gvcf_command \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + $interval_command \\ + $dbsnp_command \\ + --tmp-dir . 
\\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/genotypegvcfs/meta.yml b/modules/gatk4/genotypegvcfs/meta.yml index f465f835..7bec10ed 100644 --- a/modules/gatk4/genotypegvcfs/meta.yml +++ b/modules/gatk4/genotypegvcfs/meta.yml @@ -21,10 +21,15 @@ input: Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - gvcf: - type: tuple of files + type: file description: | - Tuple of gVCF(.gz) file (first) and its index (second) or the path to a GenomicsDB (and empty) - pattern: ["*.{vcf,vcf.gz}", "*.{idx,tbi}"] + gVCF(.gz) file or to a GenomicsDB + pattern: "*.{vcf,vcf.gz}" + - gvcf_index: + type: file + description: | + index of gvcf file, or empty when providing GenomicsDB + pattern: "*.{idx,tbi}" - intervals: type: file description: Interval file with the genomic regions included in the library (optional) @@ -35,11 +40,11 @@ input: type: file description: Reference fasta file pattern: "*.fasta" - - fasta_index: + - fai: type: file description: Reference fasta index file pattern: "*.fai" - - fasta_dict: + - dict: type: file description: Reference fasta sequence dict file pattern: "*.dict" @@ -47,8 +52,8 @@ input: type: file description: dbSNP VCF file pattern: "*.vcf.gz" - - dbsnp_index: - type: tuple of files + - dbsnp_tbi: + type: file description: dbSNP VCF index file pattern: "*.tbi" @@ -73,3 +78,4 @@ output: authors: - "@santiagorevale" + - "@maxulysse" diff --git a/modules/gatk4/getpileupsummaries/main.nf b/modules/gatk4/getpileupsummaries/main.nf index 5395c068..c0946f71 100644 --- a/modules/gatk4/getpileupsummaries/main.nf +++ b/modules/gatk4/getpileupsummaries/main.nf @@ -9,15 +9,15 @@ process GATK4_GETPILEUPSUMMARIES { input: tuple val(meta), path(input), path(index), path(intervals) - path fasta - path fai - path dict - path variants - path variants_tbi + path fasta + path fai + path dict + path variants + path variants_tbi output: tuple val(meta), path('*.pileups.table'), emit: table - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -25,8 +25,8 @@ process GATK4_GETPILEUPSUMMARIES { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def sitesCommand = intervals ? " -L ${intervals} " : " -L ${variants} " - def reference = fasta ? " -R ${fasta}" :"" + def interval_command = intervals ? "--intervals $intervals" : "" + def reference_command = fasta ? "--reference $fasta" : '' def avail_mem = 3 if (!task.memory) { @@ -36,11 +36,12 @@ process GATK4_GETPILEUPSUMMARIES { } """ gatk --java-options "-Xmx${avail_mem}g" GetPileupSummaries \\ - -I $input \\ - -V $variants \\ - $sitesCommand \\ - ${reference} \\ - -O ${prefix}.pileups.table \\ + --input $input \\ + --variant $variants \\ + --output ${prefix}.pileups.table \\ + $reference_command \\ + $sites_command \\ + --tmp-dir . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 33871fcf..57f69ecd 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -9,11 +9,11 @@ process GATK4_HAPLOTYPECALLER { input: tuple val(meta), path(input), path(input_index), path(intervals) - path fasta - path fai - path dict - path dbsnp - path dbsnp_tbi + path fasta + path fai + path dict + path dbsnp + path dbsnp_tbi output: tuple val(meta), path("*.vcf.gz"), emit: vcf @@ -26,25 +26,24 @@ process GATK4_HAPLOTYPECALLER { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def interval_option = intervals ? "-L ${intervals}" : "" - def dbsnp_option = dbsnp ? "-D ${dbsnp}" : "" - def avail_mem = 3 + def dbsnp_command = dbsnp ? "--dbsnp $dbsnp" : "" + def interval_command = intervals ? "--intervals $intervals" : "" + + def avail_mem = 3 if (!task.memory) { log.info '[GATK HaplotypeCaller] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' } else { avail_mem = task.memory.giga } """ - gatk \\ - --java-options "-Xmx${avail_mem}g" \\ - HaplotypeCaller \\ - -R $fasta \\ - -I $input \\ - ${dbsnp_option} \\ - ${interval_option} \\ - -O ${prefix}.vcf.gz \\ - $args \\ - --tmp-dir . + gatk --java-options "-Xmx${avail_mem}g" HaplotypeCaller \\ + --input $input \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + $dbsnp_command \\ + $interval_command \\ + --tmp-dir . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/indexfeaturefile/main.nf b/modules/gatk4/indexfeaturefile/main.nf index 275e51f5..90ff94e6 100644 --- a/modules/gatk4/indexfeaturefile/main.nf +++ b/modules/gatk4/indexfeaturefile/main.nf @@ -19,6 +19,7 @@ process GATK4_INDEXFEATUREFILE { script: def args = task.ext.args ?: '' + def avail_mem = 3 if (!task.memory) { log.info '[GATK IndexFeatureFile] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -26,10 +27,10 @@ process GATK4_INDEXFEATUREFILE { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - IndexFeatureFile \\ - $args \\ - -I $feature_file + gatk --java-options "-Xmx${avail_mem}g" IndexFeatureFile \\ + --input $feature_file \\ + --tmp-dir . \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/gatk4/intervallisttobed/main.nf b/modules/gatk4/intervallisttobed/main.nf index 24d20be1..c0f9df63 100644 --- a/modules/gatk4/intervallisttobed/main.nf +++ b/modules/gatk4/intervallisttobed/main.nf @@ -8,7 +8,7 @@ process GATK4_INTERVALLISTTOBED { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(interval) + tuple val(meta), path(intervals) output: tuple val(meta), path("*.bed"), emit: bed @@ -29,8 +29,9 @@ process GATK4_INTERVALLISTTOBED { } """ gatk --java-options "-Xmx${avail_mem}g" IntervalListToBed \\ - --INPUT ${interval} \\ + --INPUT $intervals \\ --OUTPUT ${prefix}.bed \\ + --TMP_DIR . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/intervallisttools/main.nf b/modules/gatk4/intervallisttools/main.nf index 82c3222c..1b9b37f4 100644 --- a/modules/gatk4/intervallisttools/main.nf +++ b/modules/gatk4/intervallisttools/main.nf @@ -8,11 +8,11 @@ process GATK4_INTERVALLISTTOOLS { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(interval_list) + tuple val(meta), path(intervals) output: tuple val(meta), path("*_split/*/*.interval_list"), emit: interval_list - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -20,6 +20,7 @@ process GATK4_INTERVALLISTTOOLS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK IntervalListTools] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -30,10 +31,10 @@ process GATK4_INTERVALLISTTOOLS { mkdir ${prefix}_split - gatk --java-options "-Xmx${avail_mem}g" \\ - IntervalListTools \\ - -I ${interval_list} \\ - -O ${prefix}_split \\ + gatk --java-options "-Xmx${avail_mem}g" IntervalListTools \\ + --INPUT $intervals \\ + --OUTPUT ${prefix}_split \\ + --TMP_DIR . \\ $args python3 < inputs_list.add(" -I " + a) } + def input_list = f1r2.collect{"--input $it"}.join(' ') + def avail_mem = 3 if (!task.memory) { log.info '[GATK LearnReadOrientationModel] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -29,10 +29,10 @@ process GATK4_LEARNREADORIENTATIONMODEL { avail_mem = task.memory.giga } """ - gatk --java-options "-Xmx${avail_mem}g" \\ - LearnReadOrientationModel \\ - ${inputs_list.join(' ')} \\ - -O ${prefix}.tar.gz \\ + gatk --java-options "-Xmx${avail_mem}g" LearnReadOrientationModel \\ + $input_list \\ + --output ${prefix}.tar.gz \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index 6b150655..e8a98156 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -8,7 +8,7 @@ process GATK4_MARKDUPLICATES { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(bams) + tuple val(meta), path(bam) output: tuple val(meta), path("*.bam") , emit: bam @@ -22,7 +22,8 @@ process GATK4_MARKDUPLICATES { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def bam_list = bams.collect(){ bam -> "--INPUT ".concat(bam.toString()) }.join(" ") + def input_list = bam.collect{"--INPUT $it"}.join(' ') + def avail_mem = 3 if (!task.memory) { log.info '[GATK MarkDuplicates] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -31,11 +32,10 @@ process GATK4_MARKDUPLICATES { } """ gatk --java-options "-Xmx${avail_mem}g" MarkDuplicates \\ - $bam_list \\ + $input_list \\ + --OUTPUT ${prefix}.bam \\ --METRICS_FILE ${prefix}.metrics \\ --TMP_DIR . 
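Every module touched above computes its JVM heap the same way: avail_mem defaults to 3 when task.memory is unset (logging a warning) and otherwise uses task.memory.giga for -Xmx, so pipelines are expected to declare process memory explicitly. A minimal sketch of a pipeline config that does so, not taken from any commit in this patch; it assumes the standard nf-core resource labels these modules use (process_low, process_medium, process_high) and the values are illustrative only:

// pipeline-level resource configuration (illustrative values)
process {
    withLabel: process_low {
        cpus   = 2
        memory = 6.GB
    }
    withLabel: process_medium {
        cpus   = 6
        memory = 36.GB
    }
    withLabel: process_high {
        cpus   = 12
        memory = 72.GB
    }
}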
\\ - --CREATE_INDEX true \\ - --OUTPUT ${prefix}.bam \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/markduplicates/meta.yml b/modules/gatk4/markduplicates/meta.yml index a7dbe8ec..93877f47 100644 --- a/modules/gatk4/markduplicates/meta.yml +++ b/modules/gatk4/markduplicates/meta.yml @@ -49,3 +49,4 @@ output: authors: - "@ajodeh-juma" - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/markduplicatesspark/main.nf b/modules/gatk4/markduplicatesspark/main.nf new file mode 100644 index 00000000..01a19e5c --- /dev/null +++ b/modules/gatk4/markduplicatesspark/main.nf @@ -0,0 +1,50 @@ +process GATK4_MARKDUPLICATES_SPARK { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::gatk4=4.2.3.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gatk4:4.2.3.0--hdfd78af_0' : + 'broadinstitute/gatk:4.2.3.0' }" + + input: + tuple val(meta), path(bam) + path fasta + path fasta_fai + path dict + + output: + tuple val(meta), path("${prefix}"), emit: output + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + prefix = task.ext.prefix ?: "${meta.id}" + def input_list = bam.collect{"--INPUT $it"}.join(' ') + + def avail_mem = 3 + if (!task.memory) { + log.info '[GATK MarkDuplicatesSpark] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' + } else { + avail_mem = task.memory.giga + } + """ + export SPARK_USER=spark3 + + gatk --java-options "-Xmx${avail_mem}g" MarkDuplicatesSpark \\ + $input_list \\ + --output $prefix \\ + --reference $fasta \\ + --spark-master local[${task.cpus}] \\ + --tmp-dir . \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ +} diff --git a/modules/gatk4/markduplicatesspark/meta.yml b/modules/gatk4/markduplicatesspark/meta.yml new file mode 100644 index 00000000..bf3e02ba --- /dev/null +++ b/modules/gatk4/markduplicatesspark/meta.yml @@ -0,0 +1,60 @@ +name: gatk4_markduplicates_spark +description: This tool locates and tags duplicate reads in a BAM or SAM file, where duplicate reads are defined as originating from a single fragment of DNA. +keywords: + - markduplicates + - bam + - sort +tools: + - gatk4: + description: + Developed in the Data Sciences Platform at the Broad Institute, the toolkit offers a wide variety of tools + with a primary focus on variant discovery and genotyping. Its powerful processing engine + and high-performance computing features make it capable of taking on projects of any size. + homepage: https://gatk.broadinstitute.org/hc/en-us + documentation: https://gatk.broadinstitute.org/hc/en-us/articles/360037052812-MarkDuplicates-Picard- + tool_dev_url: https://github.com/broadinstitute/gatk + doi: 10.1158/1538-7445.AM2017-3590 + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: Sorted BAM file + pattern: "*.{bam}" + - fasta: + type: file + description: The reference fasta file + pattern: "*.fasta" + - fai: + type: file + description: Index of reference fasta file + pattern: "*.fasta.fai" + - dict: + type: file + description: GATK sequence dictionary + pattern: "*.dict" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Marked duplicates BAM file + pattern: "*.{bam}" + +authors: + - "@ajodeh-juma" + - "@FriederikeHanssen" + - "@maxulysse" diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index cfeb23dd..7ba9ccda 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -22,6 +22,7 @@ process GATK4_MERGEBAMALIGNMENT { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK MergeBamAlignment] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -30,10 +31,11 @@ process GATK4_MERGEBAMALIGNMENT { } """ gatk --java-options "-Xmx${avail_mem}g" MergeBamAlignment \\ - -ALIGNED $aligned \\ - -UNMAPPED $unmapped \\ - -R $fasta \\ - -O ${prefix}.bam \\ + --UNMAPPED_BAM $unmapped \\ + --ALIGNED_BAM $aligned \\ + --OUTPUT ${prefix}.bam \\ + --REFERENCE_SEQUENCE $fasta \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/mergemutectstats/main.nf b/modules/gatk4/mergemutectstats/main.nf index bb9f91fb..409e06f6 100644 --- a/modules/gatk4/mergemutectstats/main.nf +++ b/modules/gatk4/mergemutectstats/main.nf @@ -9,6 +9,7 @@ process GATK4_MERGEMUTECTSTATS { input: tuple val(meta), path(stats) + output: tuple val(meta), path("*.vcf.gz.stats"), emit: stats path "versions.yml" , emit: versions @@ -19,7 +20,7 @@ process GATK4_MERGEMUTECTSTATS { script: def args = task.ext.args ?: '' prefix = task.ext.prefix ?: "${meta.id}" - def input = stats.collect{ " -stats ${it} "}.join() + def input_list = stats.collect{ "--stats ${it}"}.join(' ') def avail_mem = 3 if (!task.memory) { @@ -29,8 +30,9 @@ process GATK4_MERGEMUTECTSTATS { } """ gatk --java-options "-Xmx${avail_mem}g" MergeMutectStats \\ - ${input} \\ - -output ${meta.id}.vcf.gz.stats \\ + $input_list \\ + --output ${prefix}.vcf.gz.stats \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/mergevcfs/main.nf b/modules/gatk4/mergevcfs/main.nf index 54e38667..06ff3acb 100644 --- a/modules/gatk4/mergevcfs/main.nf +++ b/modules/gatk4/mergevcfs/main.nf @@ -8,9 +8,8 @@ process GATK4_MERGEVCFS { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcfs) - path ref_dict - val use_ref_dict + tuple val(meta), path(vcf) + path dict output: tuple val(meta), path('*.vcf.gz'), emit: vcf @@ -22,13 +21,9 @@ process GATK4_MERGEVCFS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def input_list = vcf.collect{ "--INPUT $it"}.join(' ') + def reference_command = dict ? "--SEQUENCE_DICTIONARY $dict" : "" - // Make list of VCFs to merge - def input = "" - for (vcf in vcfs) { - input += " I=${vcf}" - } - def ref = use_ref_dict ? 
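The new gatk4/markduplicatesspark module introduced above takes a per-sample BAM channel plus the reference fasta, fai and dict, and writes its output under task.ext.prefix. A minimal usage sketch, not taken from any commit in this patch and modelled on the test workflows later in the series; the include path and test-data keys are assumptions that mirror the nf-core test config:

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

include { GATK4_MARKDUPLICATES_SPARK } from './modules/gatk4/markduplicatesspark/main.nf'

workflow {
    // per-sample sorted BAM plus reference files, following the module's input channels
    input = [ [ id:'test' ], // meta map
              file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)
            ]
    fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true)
    fai   = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true)
    dict  = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true)

    GATK4_MARKDUPLICATES_SPARK ( input, fasta, fai, dict )
}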
"D=${ref_dict}" : "" def avail_mem = 3 if (!task.memory) { log.info '[GATK MergeVcfs] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -37,9 +32,10 @@ process GATK4_MERGEVCFS { } """ gatk --java-options "-Xmx${avail_mem}g" MergeVcfs \\ - $input \\ - O=${prefix}.vcf.gz \\ - $ref \\ + $input_list \\ + --OUTPUT ${prefix}.vcf.gz \\ + $reference_command \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 568d3393..4a1f5768 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -8,10 +8,7 @@ process GATK4_MUTECT2 { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta) , path(input) , path(input_index) , path(intervals), val(which_norm) - val run_single - val run_pon - val run_mito + tuple val(meta), path(input), path(input_index), path(intervals) path fasta path fai path dict @@ -33,28 +30,10 @@ process GATK4_MUTECT2 { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def panels_command = '' - def normals_command = '' - - def inputs_command = '-I ' + input.join( ' -I ') - def interval = intervals ? "-L ${intervals}" : "" - - if(run_pon) { - panels_command = '' - normals_command = '' - - } else if(run_single) { - panels_command = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals" - normals_command = '' - - } else if(run_mito){ - panels_command = "-L ${intervals} --mitochondria-mode" - normals_command = '' - - } else { - panels_command = " --germline-resource $germline_resource --panel-of-normals $panel_of_normals --f1r2-tar-gz ${prefix}.f1r2.tar.gz" - normals_command = '-normal ' + which_norm.join( ' -normal ') - } + def inputs = input.collect{ "--input $it"}.join(" ") + def interval_command = intervals ? "--intervals $intervals" : "" + def pon_command = panel_of_normals ? "--panel-of-normals $panel_of_normals" : "" + def gr_command = germline_resource ? "--germline-resource $germline_resource" : "" def avail_mem = 3 if (!task.memory) { @@ -64,12 +43,13 @@ process GATK4_MUTECT2 { } """ gatk --java-options "-Xmx${avail_mem}g" Mutect2 \\ - -R ${fasta} \\ - ${inputs_command} \\ - ${normals_command} \\ - ${panels_command} \\ - ${interval} \\ - -O ${prefix}.vcf.gz \\ + $inputs \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + $pon_command \\ + $gr_command \\ + $interval_command \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/mutect2/meta.yml b/modules/gatk4/mutect2/meta.yml index 69a4acfe..aa0a02aa 100644 --- a/modules/gatk4/mutect2/meta.yml +++ b/modules/gatk4/mutect2/meta.yml @@ -34,22 +34,6 @@ input: type: File/string description: Specify region the tools is run on. 
pattern: ".{bed,interval_list}/chrM" - - which_norm: - type: list - description: optional list of sample headers contained in the normal sample bam files (these are required for tumor_normal_pair mode) - pattern: "testN" - - run_single: - type: boolean - description: Specify whether or not to run in tumor_single mode instead of tumor_normal_pair mode (will be ignored if run_pon is also true) - pattern: "true/false" - - run_pon: - type: boolean - description: Specify whether or not to run in panel_of_normal mode instead of tumor_normal_pair mode - pattern: "true/false" - - run_mito: - type: boolean - description: Specify whether or not to run in mitochondria-mode instead of tumor_normal_pair mode - pattern: "true/false" - fasta: type: file description: The reference fasta file diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index b3bf9f95..4e8e9ddc 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -20,6 +20,7 @@ process GATK4_REVERTSAM { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK RevertSam] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -28,8 +29,9 @@ process GATK4_REVERTSAM { } """ gatk --java-options "-Xmx${avail_mem}g" RevertSam \\ - I=$bam \\ - O=${prefix}.reverted.bam \\ + --INPUT $bam \\ + --OUTPUT ${prefix}.reverted.bam \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 53e5013f..8553e419 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -20,7 +20,8 @@ process GATK4_SAMTOFASTQ { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def output = meta.single_end ? "FASTQ=${prefix}.fastq.gz" : "FASTQ=${prefix}_1.fastq.gz SECOND_END_FASTQ=${prefix}_2.fastq.gz" + def output = meta.single_end ? "--FASTQ ${prefix}.fastq.gz" : "--FASTQ ${prefix}_1.fastq.gz --SECOND_END_FASTQ ${prefix}_2.fastq.gz" + def avail_mem = 3 if (!task.memory) { log.info '[GATK SamToFastq] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -29,8 +30,9 @@ process GATK4_SAMTOFASTQ { } """ gatk --java-options "-Xmx${avail_mem}g" SamToFastq \\ - I=$bam \\ + --INPUT $bam \\ $output \\ + --TMP_DIR . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/selectvariants/main.nf b/modules/gatk4/selectvariants/main.nf index fd750a9b..22779211 100644 --- a/modules/gatk4/selectvariants/main.nf +++ b/modules/gatk4/selectvariants/main.nf @@ -21,6 +21,7 @@ process GATK4_SELECTVARIANTS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK VariantFiltration] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -29,8 +30,9 @@ process GATK4_SELECTVARIANTS { } """ gatk --java-options "-Xmx${avail_mem}G" SelectVariants \\ - -V $vcf \\ - -O ${prefix}.selectvariants.vcf.gz \\ + --variant $vcf \\ + --output ${prefix}.selectvariants.vcf.gz \\ + --tmp-dir . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index fdd1d974..f7c559d9 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -23,6 +23,7 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK SplitNCigarReads] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -31,9 +32,10 @@ process GATK4_SPLITNCIGARREADS { } """ gatk --java-options "-Xmx${avail_mem}g" SplitNCigarReads \\ - -R $fasta \\ - -I $bam \\ - -O ${prefix}.bam \\ + --input $bam \\ + --output ${prefix}.bam \\ + --reference $fasta \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/variantfiltration/main.nf b/modules/gatk4/variantfiltration/main.nf index 68f3d636..6beb87ef 100644 --- a/modules/gatk4/variantfiltration/main.nf +++ b/modules/gatk4/variantfiltration/main.nf @@ -8,7 +8,7 @@ process GATK4_VARIANTFILTRATION { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf), path(vcf_tbi) + tuple val(meta), path(vcf), path(tbi) path fasta path fai path dict @@ -24,6 +24,7 @@ process GATK4_VARIANTFILTRATION { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def avail_mem = 3 if (!task.memory) { log.info '[GATK VariantFiltration] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' @@ -32,9 +33,10 @@ process GATK4_VARIANTFILTRATION { } """ gatk --java-options "-Xmx${avail_mem}G" VariantFiltration \\ - -R $fasta \\ - -V $vcf \\ - -O ${prefix}.vcf.gz \\ + --variant $vcf \\ + --output ${prefix}.vcf.gz \\ + --reference $fasta \\ + --tmp-dir . \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/modules/gatk4/variantrecalibrator/main.nf b/modules/gatk4/variantrecalibrator/main.nf index 31c9efbd..cdcc1221 100644 --- a/modules/gatk4/variantrecalibrator/main.nf +++ b/modules/gatk4/variantrecalibrator/main.nf @@ -8,11 +8,11 @@ process GATK4_VARIANTRECALIBRATOR { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(vcf) , path(tbi) - path fasta - path fai - path dict - tuple path(resvcfs), path(restbis), val(reslabels) + tuple val(meta), path(vcf), path(tbi) + tuple path(vcfs), path(tbis), val(labels) + path fasta + path fai + path dict output: tuple val(meta), path("*.recal") , emit: recal @@ -27,8 +27,8 @@ process GATK4_VARIANTRECALIBRATOR { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - refCommand = fasta ? "-R ${fasta} " : '' - resourceCommand = '--resource:' + reslabels.join( ' --resource:') + def reference_command = fasta ? "--reference $fasta " : '' + def resource_command = labels.collect{"--resource:$it"}.join(' ') def avail_mem = 3 if (!task.memory) { @@ -38,11 +38,12 @@ process GATK4_VARIANTRECALIBRATOR { } """ gatk --java-options "-Xmx${avail_mem}g" VariantRecalibrator \\ - ${refCommand} \\ - -V ${vcf} \\ - -O ${prefix}.recal \\ + --variant $vcf \\ + --output ${prefix}.recal \\ --tranches-file ${prefix}.tranches \\ - ${resourceCommand} \\ + $reference_command \\ + $resource_command \\ + --tmp-dir . 
\\ $args cat <<-END_VERSIONS > versions.yml diff --git a/tests/modules/gatk4/applybqsr/test.yml b/tests/modules/gatk4/applybqsr/test.yml index 4520c34b..eaf1a08e 100644 --- a/tests/modules/gatk4/applybqsr/test.yml +++ b/tests/modules/gatk4/applybqsr/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.bam md5sum: d088422be886dc8507ff97fcc7dd968a - path: output/gatk4/versions.yml - md5sum: d5c6455d8a77aecc63f87c795fc3443e - name: gatk4 applybqsr test_gatk4_applybqsr_intervals command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_intervals -c tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config @@ -18,7 +17,6 @@ - path: output/gatk4/test.bam md5sum: 4bfa18d651abd945e240b05e70107716 - path: output/gatk4/versions.yml - md5sum: cb4cb8a62e117b4adc643ae47883d53c - name: gatk4 applybqsr test_gatk4_applybqsr_cram command: nextflow run tests/modules/gatk4/applybqsr -entry test_gatk4_applybqsr_cram -c tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsr/nextflow.config @@ -29,4 +27,3 @@ - path: output/gatk4/test.cram md5sum: 2e0bca197af4f043a4a85152e6edbe04 - path: output/gatk4/versions.yml - md5sum: 1efaa18be943bab4e4c54191d6eaa260 diff --git a/tests/modules/gatk4/applybqsrspark/main.nf b/tests/modules/gatk4/applybqsrspark/main.nf new file mode 100644 index 00000000..ee1f88dd --- /dev/null +++ b/tests/modules/gatk4/applybqsrspark/main.nf @@ -0,0 +1,47 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_APPLYBQSR_SPARK } from '../../../../modules/gatk4/applybqsrspark/main.nf' + +workflow test_gatk4_applybqsr_spark { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + + GATK4_APPLYBQSR_SPARK ( input, fasta, fai, dict ) +} + +workflow test_gatk4_applybqsr_spark_intervals { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + + GATK4_APPLYBQSR_SPARK ( input, fasta, fai, dict ) +} + +workflow test_gatk4_applybqsr_spark_cram { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_baserecalibrator_table'], checkIfExists: true), + 
file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + + GATK4_APPLYBQSR_SPARK ( input, fasta, fai, dict ) +} diff --git a/tests/modules/gatk4/applybqsrspark/nextflow.config b/tests/modules/gatk4/applybqsrspark/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/applybqsrspark/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/applybqsrspark/test.yml b/tests/modules/gatk4/applybqsrspark/test.yml new file mode 100644 index 00000000..d230c000 --- /dev/null +++ b/tests/modules/gatk4/applybqsrspark/test.yml @@ -0,0 +1,29 @@ +- name: gatk4 applybqsr test_gatk4_applybqsr_spark + command: nextflow run tests/modules/gatk4/applybqsrspark -entry test_gatk4_applybqsr_spark -c tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsrspark/nextflow.config + tags: + - gatk4 + - gatk4/applybqsrspark + files: + - path: output/gatk4/test.bam + md5sum: d088422be886dc8507ff97fcc7dd968a + - path: output/gatk4/versions.yml + +- name: gatk4 applybqsr test_gatk4_applybqsr_spark_intervals + command: nextflow run tests/modules/gatk4/applybqsrspark -entry test_gatk4_applybqsr_spark_intervals -c tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsrspark/nextflow.config + tags: + - gatk4 + - gatk4/applybqsrspark + files: + - path: output/gatk4/test.bam + md5sum: 4bfa18d651abd945e240b05e70107716 + - path: output/gatk4/versions.yml + +- name: gatk4 applybqsr test_gatk4_applybqsr_spark_cram + command: nextflow run tests/modules/gatk4/applybqsrspark -entry test_gatk4_applybqsr_spark_cram -c tests/config/nextflow.config -c ./tests/modules/gatk4/applybqsrspark/nextflow.config + tags: + - gatk4 + - gatk4/applybqsrspark + files: + - path: output/gatk4/test.cram + md5sum: 2e0bca197af4f043a4a85152e6edbe04 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/applyvqsr/test.yml b/tests/modules/gatk4/applyvqsr/test.yml index 7cb91c43..5b367bcc 100644 --- a/tests/modules/gatk4/applyvqsr/test.yml +++ b/tests/modules/gatk4/applyvqsr/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.tbi - path: output/gatk4/versions.yml - md5sum: ce9c443375683e7f2958fe958759ad29 - name: gatk4 applyvqsr test_gatk4_applyvqsr_allele_specific command: nextflow run tests/modules/gatk4/applyvqsr -entry test_gatk4_applyvqsr_allele_specific -c tests/config/nextflow.config -c ./tests/modules/gatk4/applyvqsr/nextflow.config @@ -18,4 +17,3 @@ - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.tbi - path: output/gatk4/versions.yml - md5sum: 521353d12d576de2864f1d18a3e54f14 diff --git a/tests/modules/gatk4/baserecalibrator/test.yml b/tests/modules/gatk4/baserecalibrator/test.yml index 163fac08..ec103dd4 100644 --- a/tests/modules/gatk4/baserecalibrator/test.yml +++ b/tests/modules/gatk4/baserecalibrator/test.yml @@ -6,6 +6,7 @@ files: - path: output/gatk4/test.table md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 + - path: output/gatk4/versions.yml - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_cram command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry 
test_gatk4_baserecalibrator_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config @@ -15,6 +16,7 @@ files: - path: output/gatk4/test.table md5sum: 35d89a3811aa31711fc9815b6b80e6ec + - path: output/gatk4/versions.yml - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_intervals command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config @@ -24,6 +26,7 @@ files: - path: output/gatk4/test.table md5sum: 9ecb5f00a2229291705addc09c0ec231 + - path: output/gatk4/versions.yml - name: gatk4 baserecalibrator test_gatk4_baserecalibrator_multiple_sites command: nextflow run ./tests/modules/gatk4/baserecalibrator -entry test_gatk4_baserecalibrator_multiple_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibrator/nextflow.config @@ -33,3 +36,4 @@ files: - path: output/gatk4/test.table md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/baserecalibratorspark/main.nf b/tests/modules/gatk4/baserecalibratorspark/main.nf new file mode 100644 index 00000000..8419e16b --- /dev/null +++ b/tests/modules/gatk4/baserecalibratorspark/main.nf @@ -0,0 +1,69 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_BASERECALIBRATOR_SPARK } from '../../../../modules/gatk4/baserecalibratorspark/main.nf' + +workflow test_gatk4_baserecalibrator_spark { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + sites = file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) + sites_tbi = file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true) + + GATK4_BASERECALIBRATOR_SPARK ( input, fasta, fai, dict, sites, sites_tbi ) +} + +workflow test_gatk4_baserecalibrator_spark_cram { + input = [ [ id:'test' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_cram_crai'], checkIfExists: true), + [] + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + sites = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + sites_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + + GATK4_BASERECALIBRATOR_SPARK ( input, fasta, fai, dict, sites, sites_tbi ) +} + +workflow test_gatk4_baserecalibrator_spark_intervals { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + 
file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + sites = file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) + sites_tbi = file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true) + + GATK4_BASERECALIBRATOR_SPARK ( input, fasta, fai, dict, sites, sites_tbi ) +} + +workflow test_gatk4_baserecalibrator_spark_multiple_sites { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + [] + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + sites = [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf_gz'], checkIfExists: true) + ] + sites_tbi = [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf_gz_tbi'], checkIfExists: true) + ] + + GATK4_BASERECALIBRATOR_SPARK ( input, fasta, fai, dict, sites, sites_tbi ) +} diff --git a/tests/modules/gatk4/baserecalibratorspark/nextflow.config b/tests/modules/gatk4/baserecalibratorspark/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/baserecalibratorspark/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/baserecalibratorspark/test.yml b/tests/modules/gatk4/baserecalibratorspark/test.yml new file mode 100644 index 00000000..6eb9d91d --- /dev/null +++ b/tests/modules/gatk4/baserecalibratorspark/test.yml @@ -0,0 +1,39 @@ +- name: gatk4 baserecalibrator test_gatk4_baserecalibrator_spark + command: nextflow run ./tests/modules/gatk4/baserecalibratorspark -entry test_gatk4_baserecalibrator_spark -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibratorspark/nextflow.config + tags: + - gatk4 + - gatk4/baserecalibratorspark + files: + - path: output/gatk4/test.table + md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 + - path: output/gatk4/versions.yml + +- name: gatk4 baserecalibrator test_gatk4_baserecalibrator_spark_cram + command: nextflow run ./tests/modules/gatk4/baserecalibratorspark -entry test_gatk4_baserecalibrator_spark_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibratorspark/nextflow.config + tags: + - gatk4 + - gatk4/baserecalibratorspark + files: + - path: output/gatk4/test.table + md5sum: 35d89a3811aa31711fc9815b6b80e6ec + - path: output/gatk4/versions.yml + +- name: gatk4 baserecalibrator test_gatk4_baserecalibrator_spark_intervals + command: nextflow run ./tests/modules/gatk4/baserecalibratorspark -entry test_gatk4_baserecalibrator_spark_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibratorspark/nextflow.config + tags: + - gatk4 + - 
gatk4/baserecalibratorspark + files: + - path: output/gatk4/test.table + md5sum: 9ecb5f00a2229291705addc09c0ec231 + - path: output/gatk4/versions.yml + +- name: gatk4 baserecalibrator test_gatk4_baserecalibrator_spark_multiple_sites + command: nextflow run ./tests/modules/gatk4/baserecalibratorspark -entry test_gatk4_baserecalibrator_spark_multiple_sites -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/baserecalibratorspark/nextflow.config + tags: + - gatk4 + - gatk4/baserecalibratorspark + files: + - path: output/gatk4/test.table + md5sum: e2e43abdc0c943c1a54dae816d0b9ea7 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/bedtointervallist/test.yml b/tests/modules/gatk4/bedtointervallist/test.yml index 3482fa6c..d8eade51 100644 --- a/tests/modules/gatk4/bedtointervallist/test.yml +++ b/tests/modules/gatk4/bedtointervallist/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.interval_list md5sum: e51101c9357fb2d59fd30e370eefa39c + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/calculatecontamination/main.nf b/tests/modules/gatk4/calculatecontamination/main.nf index 4b659ed3..c6e085b1 100644 --- a/tests/modules/gatk4/calculatecontamination/main.nf +++ b/tests/modules/gatk4/calculatecontamination/main.nf @@ -2,7 +2,8 @@ nextflow.enable.dsl = 2 -include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' +include { GATK4_CALCULATECONTAMINATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' +include { GATK4_CALCULATECONTAMINATION as GATK4_CALCULATECONTAMINATION_SEGMENTATION } from '../../../../modules/gatk4/calculatecontamination/main.nf' workflow test_gatk4_calculatecontamination_tumor_only { @@ -10,9 +11,7 @@ workflow test_gatk4_calculatecontamination_tumor_only { file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), [] ] - segmentout = false - - GATK4_CALCULATECONTAMINATION ( input, segmentout ) + GATK4_CALCULATECONTAMINATION ( input ) } workflow test_gatk4_calculatecontamination_matched_pair { @@ -21,9 +20,7 @@ workflow test_gatk4_calculatecontamination_matched_pair { file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_pileups_table'], checkIfExists: true) ] - segmentout = false - - GATK4_CALCULATECONTAMINATION ( input, segmentout ) + GATK4_CALCULATECONTAMINATION ( input ) } workflow test_gatk4_calculatecontamination_segmentation { @@ -32,7 +29,5 @@ workflow test_gatk4_calculatecontamination_segmentation { file(params.test_data['homo_sapiens']['illumina']['test2_pileups_table'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_pileups_table'], checkIfExists: true) ] - segmentout = true - - GATK4_CALCULATECONTAMINATION ( input, segmentout ) + GATK4_CALCULATECONTAMINATION_SEGMENTATION ( input ) } diff --git a/tests/modules/gatk4/calculatecontamination/nextflow.config b/tests/modules/gatk4/calculatecontamination/nextflow.config index 8730f1c4..3789a000 100644 --- a/tests/modules/gatk4/calculatecontamination/nextflow.config +++ b/tests/modules/gatk4/calculatecontamination/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: GATK4_CALCULATECONTAMINATION_SEGMENTATION { + ext.args = { "--tumor-segmentation ${meta.id}.segmentation.table" } + } + } diff --git a/tests/modules/gatk4/calculatecontamination/test.yml 
b/tests/modules/gatk4/calculatecontamination/test.yml index 4cd44281..a00e26f0 100644 --- a/tests/modules/gatk4/calculatecontamination/test.yml +++ b/tests/modules/gatk4/calculatecontamination/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.contamination.table md5sum: 46c708c943b453da89a3da08acfdb2a7 - path: output/gatk4/versions.yml - md5sum: 3da8f1c0de968886330a3f7a3a1c6616 - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_matched_pair command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_matched_pair -c tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config @@ -18,7 +17,6 @@ - path: output/gatk4/test.contamination.table md5sum: 46c708c943b453da89a3da08acfdb2a7 - path: output/gatk4/versions.yml - md5sum: 14ab12a71b0c2b87d8cd53639a991b3a - name: gatk4 calculatecontamination test_gatk4_calculatecontamination_segmentation command: nextflow run tests/modules/gatk4/calculatecontamination -entry test_gatk4_calculatecontamination_segmentation -c tests/config/nextflow.config -c ./tests/modules/gatk4/calculatecontamination/nextflow.config @@ -31,4 +29,3 @@ - path: output/gatk4/test.segmentation.table md5sum: f4643d9319bde4efbfbe516d6fb13052 - path: output/gatk4/versions.yml - md5sum: d2e61315de31f512e448f0cb4b77db54 diff --git a/tests/modules/gatk4/combinegvcfs/test.yml b/tests/modules/gatk4/combinegvcfs/test.yml index 54948668..762a72f3 100644 --- a/tests/modules/gatk4/combinegvcfs/test.yml +++ b/tests/modules/gatk4/combinegvcfs/test.yml @@ -7,4 +7,3 @@ - path: output/gatk4/test.combined.g.vcf.gz contains: ["VCFv4.2"] - path: output/gatk4/versions.yml - md5sum: 49d9c467f84b6a99a4da3ef161af26bd diff --git a/tests/modules/gatk4/createsequencedictionary/test.yml b/tests/modules/gatk4/createsequencedictionary/test.yml index 134a9d74..3656e0e2 100644 --- a/tests/modules/gatk4/createsequencedictionary/test.yml +++ b/tests/modules/gatk4/createsequencedictionary/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/genome.dict md5sum: 7362679f176e0f52add03c08f457f646 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml index a0e2bf26..00d8cae9 100644 --- a/tests/modules/gatk4/createsomaticpanelofnormals/test.yml +++ b/tests/modules/gatk4/createsomaticpanelofnormals/test.yml @@ -7,3 +7,4 @@ - path: output/gatk4/test.pon.vcf.gz - path: output/gatk4/test.pon.vcf.gz.tbi md5sum: e7ca7e9fe76ce12198fd54ec9a64fad4 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml index a33e4ec1..cf5d187f 100644 --- a/tests/modules/gatk4/estimatelibrarycomplexity/test.yml +++ b/tests/modules/gatk4/estimatelibrarycomplexity/test.yml @@ -5,3 +5,4 @@ - gatk4 files: - path: output/gatk4/test.metrics + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/fastqtosam/test.yml b/tests/modules/gatk4/fastqtosam/test.yml index 07f9af15..f4b2c561 100644 --- a/tests/modules/gatk4/fastqtosam/test.yml +++ b/tests/modules/gatk4/fastqtosam/test.yml @@ -6,7 +6,6 @@ files: - path: output/gatk4/test.bam - path: output/gatk4/versions.yml - md5sum: 381cdb2496b2fcc7bbc371a6e4156c7e - name: gatk4 fastqtosam test_gatk4_fastqtosam_paired_end command: nextflow run tests/modules/gatk4/fastqtosam -entry test_gatk4_fastqtosam_paired_end -c tests/config/nextflow.config -c 
./tests/modules/gatk4/fastqtosam/nextflow.config @@ -16,4 +15,3 @@ files: - path: output/gatk4/test.bam - path: output/gatk4/versions.yml - md5sum: 1d07c90cbd31992c9ba003f02d1b3502 diff --git a/tests/modules/gatk4/filtermutectcalls/test.yml b/tests/modules/gatk4/filtermutectcalls/test.yml index 12cf4e69..6f650f32 100644 --- a/tests/modules/gatk4/filtermutectcalls/test.yml +++ b/tests/modules/gatk4/filtermutectcalls/test.yml @@ -8,6 +8,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 55f228e5520c8b9fbac017d3a3a6c5fd - path: output/gatk4/test.filtered.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_with_files command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_with_files -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -19,6 +20,7 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 9ae27fbd04af1a2ea574e2ff1c3a683b - path: output/gatk4/test.filtered.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 filtermutectcalls test_gatk4_filtermutectcalls_use_val command: nextflow run ./tests/modules/gatk4/filtermutectcalls -entry test_gatk4_filtermutectcalls_use_val -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/filtermutectcalls/nextflow.config @@ -30,3 +32,4 @@ - path: output/gatk4/test.filtered.vcf.gz.filteringStats.tsv md5sum: 95cc3e37705bd3b97a292c5d46ab82f3 - path: output/gatk4/test.filtered.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/gatherbqsrreports/test.yml b/tests/modules/gatk4/gatherbqsrreports/test.yml index 576889de..2fb4917a 100644 --- a/tests/modules/gatk4/gatherbqsrreports/test.yml +++ b/tests/modules/gatk4/gatherbqsrreports/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.table md5sum: 9603b69fdc3b5090de2e0dd78bfcc4bf - path: output/gatk4/versions.yml - md5sum: 8d52c5aaab73294e9ea5491b95f3e1e1 - name: gatk4 gatherbqsrreports test_gatk4_gatherbqsrreports_multiple command: nextflow run tests/modules/gatk4/gatherbqsrreports -entry test_gatk4_gatherbqsrreports_multiple -c tests/config/nextflow.config @@ -18,4 +17,3 @@ - path: output/gatk4/test.table md5sum: 0c1257eececf95db8ca378272d0f21f9 - path: output/gatk4/versions.yml - md5sum: 91cad396b9f2045c3cd8c0f256672e80 diff --git a/tests/modules/gatk4/gatherpileupsummaries/test.yml b/tests/modules/gatk4/gatherpileupsummaries/test.yml index 0c38a602..efd02f52 100644 --- a/tests/modules/gatk4/gatherpileupsummaries/test.yml +++ b/tests/modules/gatk4/gatherpileupsummaries/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.pileupsummaries.table md5sum: 8e0ca6f66e112bd2f7ec1d31a2d62469 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/genomicsdbimport/test.yml b/tests/modules/gatk4/genomicsdbimport/test.yml index 5c4ea2bb..765cbfbe 100644 --- a/tests/modules/gatk4/genomicsdbimport/test.yml +++ b/tests/modules/gatk4/genomicsdbimport/test.yml @@ -5,11 +5,9 @@ - gatk4/genomicsdbimport files: - path: output/gatk4/test/__tiledb_workspace.tdb - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/gatk4/test/callset.json md5sum: a7d07d1c86449bbb1091ff29368da07a - path: output/gatk4/test/chr22$1$40001/.__consolidation_lock - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/gatk4/test/chr22$1$40001/__array_schema.tdb - path: output/gatk4/test/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json md5sum: 2502f79658bc000578ebcfddfc1194c0 @@ -19,7 +17,6 @@ - 
path: output/gatk4/test/vidmap.json md5sum: 18d3f68bd2cb6f4474990507ff95017a - path: output/gatk4/versions.yml - md5sum: 91f5c3e9529982f9c819860b403576ce - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_get_intervalslist command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_get_intervalslist -c tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config @@ -30,9 +27,7 @@ - path: output/gatk4/test.interval_list md5sum: 4c85812ac15fc1cd29711a851d23c0bf - path: output/gatk4/versions.yml - md5sum: a898fe1cbc4acfa5936c0ffdcf121401 - path: output/untar/versions.yml - md5sum: 8f080677b109aea2cfca50208b077534 - name: gatk4 genomicsdbimport test_gatk4_genomicsdbimport_update_genomicsdb command: nextflow run tests/modules/gatk4/genomicsdbimport -entry test_gatk4_genomicsdbimport_update_genomicsdb -c tests/config/nextflow.config -c ./tests/modules/gatk4/genomicsdbimport/nextflow.config @@ -41,11 +36,9 @@ - gatk4/genomicsdbimport files: - path: output/gatk4/test_genomicsdb/__tiledb_workspace.tdb - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/gatk4/test_genomicsdb/callset.json md5sum: 1ea31b59b9a218dd5681164aff4a5e07 - path: output/gatk4/test_genomicsdb/chr22$1$40001/.__consolidation_lock - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/gatk4/test_genomicsdb/chr22$1$40001/__array_schema.tdb md5sum: 6709e67921ae840bf61fbfb192554eda - path: output/gatk4/test_genomicsdb/chr22$1$40001/genomicsdb_meta_dir/genomicsdb_column_bounds.json @@ -55,6 +48,4 @@ - path: output/gatk4/test_genomicsdb/vidmap.json md5sum: 18d3f68bd2cb6f4474990507ff95017a - path: output/gatk4/versions.yml - md5sum: d87baa3f4218c5554cad3c008cb6cbc4 - path: output/untar/versions.yml - md5sum: 9b2916aea9790bdf427c0cb38109110c diff --git a/tests/modules/gatk4/genotypegvcfs/main.nf b/tests/modules/gatk4/genotypegvcfs/main.nf index a5ae8d46..75990958 100644 --- a/tests/modules/gatk4/genotypegvcfs/main.nf +++ b/tests/modules/gatk4/genotypegvcfs/main.nf @@ -9,93 +9,96 @@ include { UNTAR } from '../../../../modules/untar/main.nf' workflow test_gatk4_genotypegvcfs_vcf_input { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true), - [] - ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_idx'], checkIfExists: true), + [], + [] + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], []) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, [], []) } // Basic parameters with compressed VCF input workflow test_gatk4_genotypegvcfs_gz_input { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - 
file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), - [] - ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), + [], + [] + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], []) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, [], []) } // Basic parameters + optional dbSNP workflow test_gatk4_genotypegvcfs_gz_input_dbsnp { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), - [] - ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), + [], + [] + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) - dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, dbsnp, dbsnp_tbi) } // Basic parameters + optional intervals workflow test_gatk4_genotypegvcfs_gz_input_intervals { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: 
true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], []) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, [], []) } // Basic parameters + optional dbSNP + optional intervals workflow test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) - ] + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_genome_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) - dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex ) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, dbsnp, dbsnp_tbi ) } // Basic parameters with GenomicsDB input workflow test_gatk4_genotypegvcfs_gendb_input { - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] @@ -106,18 +109,18 @@ workflow test_gatk4_genotypegvcfs_gendb_input { input = Channel.of([ id:'test' ]).combine(gendb) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], []) + GATK4_GENOTYPEGVCFS ( input, 
fasta, fai, dict, [], []) } // Basic parameters with GenomicsDB + optional dbSNP workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) - dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] @@ -127,15 +130,15 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp { gendb.add([]) input = Channel.of([ id:'test' ]).combine(gendb) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, dbsnp, dbsnp_tbi) } // Basic parameters with GenomicsDB + optional intervals workflow test_gatk4_genotypegvcfs_gendb_input_intervals { - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] @@ -145,18 +148,18 @@ workflow test_gatk4_genotypegvcfs_gendb_input_intervals { gendb.add([file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)]) input = Channel.of([ id:'test' ]).combine(gendb) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, [], [] ) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, [], [] ) } // Basic parameters with GenomicsDB + optional dbSNP + optional intervals workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - fastaIndex = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) - fastaDict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - dbsnp = 
file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) - dbsnpIndex = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) + dbsnp = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz'], checkIfExists: true) + dbsnp_tbi = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_vcf_gz_tbi'], checkIfExists: true) test_genomicsdb = [ [], file(params.test_data['homo_sapiens']['illumina']['test_genomicsdb_tar_gz'], checkIfExists: true) ] @@ -166,5 +169,5 @@ workflow test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals { gendb.add([file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)]) input = Channel.of([ id:'test' ]).combine(gendb) - GATK4_GENOTYPEGVCFS ( input, fasta, fastaIndex, fastaDict, dbsnp, dbsnpIndex ) + GATK4_GENOTYPEGVCFS ( input, fasta, fai, dict, dbsnp, dbsnp_tbi ) } diff --git a/tests/modules/gatk4/genotypegvcfs/test.yml b/tests/modules/gatk4/genotypegvcfs/test.yml index dff79460..ec8ee951 100644 --- a/tests/modules/gatk4/genotypegvcfs/test.yml +++ b/tests/modules/gatk4/genotypegvcfs/test.yml @@ -10,6 +10,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -23,6 +24,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -36,6 +38,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -49,6 +52,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gz_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -59,6 +63,7 @@ - path: output/gatk4/test.genotyped.vcf.gz contains: ["AC=2;AF=1.00;AN=2;DB;DP=20;ExcessHet=0.0000;FS=0.000;MLEAC=2;MLEAF=1.00;MQ=60.00;QD=24.05;SOR=0.693"] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs 
test_gatk4_genotypegvcfs_gendb_input command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -72,6 +77,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -85,6 +91,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DB;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -98,6 +105,7 @@ "AC=1;AF=0.500;AN=2;BaseQRankSum=0.00;DP=211;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;MQRankSum=0.00;QD=0.95;ReadPosRankSum=1.09;SOR=0.680", ] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 genotypegvcfs test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals command: nextflow run ./tests/modules/gatk4/genotypegvcfs -entry test_gatk4_genotypegvcfs_gendb_input_dbsnp_intervals -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/genotypegvcfs/nextflow.config @@ -108,3 +116,4 @@ - path: output/gatk4/test.genotyped.vcf.gz contains: ["AC=2;AF=1.00;AN=2;DP=2;ExcessHet=0.0000;FS=0.000;MLEAC=1;MLEAF=0.500;MQ=60.00;QD=18.66;SOR=0.693"] - path: output/gatk4/test.genotyped.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/getpileupsummaries/test.yml b/tests/modules/gatk4/getpileupsummaries/test.yml index e305d412..2e4acce9 100644 --- a/tests/modules/gatk4/getpileupsummaries/test.yml +++ b/tests/modules/gatk4/getpileupsummaries/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.pileups.table md5sum: 8e0ca6f66e112bd2f7ec1d31a2d62469 - path: output/gatk4/versions.yml - md5sum: 059123619f3ed8d4cd178c4390b81e69 - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites -c tests/config/nextflow.config @@ -18,7 +17,6 @@ - path: output/gatk4/test.pileups.table md5sum: 8e0ca6f66e112bd2f7ec1d31a2d62469 - path: output/gatk4/versions.yml - md5sum: 76b5388b0c5b5762d8d33e34b23f181d - name: gatk4 getpileupsummaries test_gatk4_getpileupsummaries_separate_sites_cram command: nextflow run tests/modules/gatk4/getpileupsummaries -entry test_gatk4_getpileupsummaries_separate_sites_cram -c tests/config/nextflow.config @@ -29,4 +27,3 @@ - path: output/gatk4/test.pileups.table md5sum: 8e0ca6f66e112bd2f7ec1d31a2d62469 - path: output/gatk4/versions.yml - md5sum: 2fa51319c2b1d678ee00ab09512cf268 diff --git a/tests/modules/gatk4/haplotypecaller/test.yml b/tests/modules/gatk4/haplotypecaller/test.yml index 31dd23fd..3d416a0d 100644 --- a/tests/modules/gatk4/haplotypecaller/test.yml +++ 
b/tests/modules/gatk4/haplotypecaller/test.yml @@ -6,6 +6,7 @@ files: - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_cram command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_cram -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config @@ -15,6 +16,7 @@ files: - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 haplotypecaller test_gatk4_haplotypecaller_intervals_dbsnp command: nextflow run ./tests/modules/gatk4/haplotypecaller -entry test_gatk4_haplotypecaller_intervals_dbsnp -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/haplotypecaller/nextflow.config @@ -24,3 +26,4 @@ files: - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/indexfeaturefile/test.yml b/tests/modules/gatk4/indexfeaturefile/test.yml index 938c2b91..9187fa4a 100644 --- a/tests/modules/gatk4/indexfeaturefile/test.yml +++ b/tests/modules/gatk4/indexfeaturefile/test.yml @@ -5,6 +5,7 @@ - gatk4/indexfeaturefile files: - path: output/gatk4/genome.bed.idx + - path: output/gatk4/versions.yml - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_bed_gz command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_bed_gz -c tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config @@ -15,7 +16,6 @@ - path: output/gatk4/genome.bed.gz.tbi md5sum: 4bc51e2351a6e83f20e13be75861f941 - path: output/gatk4/versions.yml - md5sum: e5003204702f83aabdb4141272c704d2 - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf -c tests/config/nextflow.config -c ./tests/modules/gatk4/indexfeaturefile/nextflow.config @@ -25,7 +25,6 @@ files: - path: output/gatk4/test.genome.vcf.idx - path: output/gatk4/versions.yml - md5sum: 08cd7c49cfb752fc2905f600106a0345 - name: gatk4 indexfeaturefile test_gatk4_indexfeaturefile_vcf_gz command: nextflow run tests/modules/gatk4/indexfeaturefile -entry test_gatk4_indexfeaturefile_vcf_gz -c tests/config/nextflow.config @@ -36,4 +35,3 @@ - path: output/gatk4/test.genome.vcf.gz.tbi md5sum: fedd68eaddf8d31257853d9da8325bd3 - path: output/gatk4/versions.yml - md5sum: b388d1681831a40264a7a27f67a8b247 diff --git a/tests/modules/gatk4/intervallisttobed/test.yml b/tests/modules/gatk4/intervallisttobed/test.yml index 9e6e38c5..a148f745 100644 --- a/tests/modules/gatk4/intervallisttobed/test.yml +++ b/tests/modules/gatk4/intervallisttobed/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.bed md5sum: 9046675d01199fbbee79f2bc1c5dce52 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/intervallisttools/test.yml b/tests/modules/gatk4/intervallisttools/test.yml index c9cb23b8..5542714b 100644 --- a/tests/modules/gatk4/intervallisttools/test.yml +++ b/tests/modules/gatk4/intervallisttools/test.yml @@ -14,3 +14,4 @@ md5sum: 55da0f3c69504148f4e7002a0e072cfe - path: output/gatk4/test_split/temp_0004_of_6/4scattered.interval_list md5sum: d29ca4447f32547f2936567fa902796a + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/learnreadorientationmodel/test.yml b/tests/modules/gatk4/learnreadorientationmodel/test.yml index b88df15f..d3d64c50 100644 --- 
a/tests/modules/gatk4/learnreadorientationmodel/test.yml +++ b/tests/modules/gatk4/learnreadorientationmodel/test.yml @@ -5,3 +5,4 @@ - gatk4/learnreadorientationmodel files: - path: output/gatk4/test.artifact-prior.tar.gz + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/markduplicates/nextflow.config b/tests/modules/gatk4/markduplicates/nextflow.config index 8730f1c4..787c9589 100644 --- a/tests/modules/gatk4/markduplicates/nextflow.config +++ b/tests/modules/gatk4/markduplicates/nextflow.config @@ -2,4 +2,8 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: GATK4_MARKDUPLICATES { + ext.args = '--CREATE_INDEX true' + } + } diff --git a/tests/modules/gatk4/markduplicates/test.yml b/tests/modules/gatk4/markduplicates/test.yml index 7bf49b56..1fbd768e 100644 --- a/tests/modules/gatk4/markduplicates/test.yml +++ b/tests/modules/gatk4/markduplicates/test.yml @@ -10,7 +10,6 @@ md5sum: 2efd50b2e6b7fd9bdf242cd9e266cfa9 - path: output/gatk4/test.metrics - path: output/gatk4/versions.yml - md5sum: 0bc949aaa8792cd6c537cdaab0e2c145 - name: gatk4 markduplicates test_gatk4_markduplicates_multiple_bams command: nextflow run tests/modules/gatk4/markduplicates -entry test_gatk4_markduplicates_multiple_bams -c tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicates/nextflow.config @@ -24,4 +23,3 @@ md5sum: 8187febc6108ffef7f907e89b9c091a4 - path: output/gatk4/test.metrics - path: output/gatk4/versions.yml - md5sum: b10d63cf7b2b672915cb30cea081ccd5 diff --git a/tests/modules/gatk4/markduplicatesspark/main.nf b/tests/modules/gatk4/markduplicatesspark/main.nf new file mode 100644 index 00000000..2f294f59 --- /dev/null +++ b/tests/modules/gatk4/markduplicatesspark/main.nf @@ -0,0 +1,28 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GATK4_MARKDUPLICATES_SPARK } from '../../../../modules/gatk4/markduplicatesspark/main.nf' + +workflow test_gatk4_markduplicates_spark { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + + GATK4_MARKDUPLICATES_SPARK ( input, fasta, fai, dict ) +} + +workflow test_gatk4_markduplicates_spark_multiple_bams { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + ] ] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + + GATK4_MARKDUPLICATES_SPARK ( input, fasta, fai, dict ) +} diff --git a/tests/modules/gatk4/markduplicatesspark/nextflow.config b/tests/modules/gatk4/markduplicatesspark/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/gatk4/markduplicatesspark/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { 
"${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/gatk4/markduplicatesspark/test.yml b/tests/modules/gatk4/markduplicatesspark/test.yml new file mode 100644 index 00000000..b0c0b40d --- /dev/null +++ b/tests/modules/gatk4/markduplicatesspark/test.yml @@ -0,0 +1,25 @@ +- name: gatk4 markduplicates test_gatk4_markduplicates_spark + command: nextflow run tests/modules/gatk4/markduplicatesspark -entry test_gatk4_markduplicates_spark -c tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicatesspark/nextflow.config + tags: + - gatk4 + - gatk4/markduplicatesspark + files: + - path: output/gatk4/test.bai + md5sum: e9c125e82553209933883b4fe2b8d7c2 + - path: output/gatk4/test.bam + md5sum: 2efd50b2e6b7fd9bdf242cd9e266cfa9 + - path: output/gatk4/test.metrics + - path: output/gatk4/versions.yml + +- name: gatk4 markduplicates test_gatk4_markduplicates_spark_multiple_bams + command: nextflow run tests/modules/gatk4/markduplicatesspark -entry test_gatk4_markduplicates_spark_multiple_bams -c tests/config/nextflow.config -c ./tests/modules/gatk4/markduplicatesspark/nextflow.config + tags: + - gatk4 + - gatk4/markduplicatesspark + files: + - path: output/gatk4/test.bai + md5sum: bad71df9c876e72a5bc0a3e0fd755f92 + - path: output/gatk4/test.bam + md5sum: 8187febc6108ffef7f907e89b9c091a4 + - path: output/gatk4/test.metrics + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index 5e1ab8d5..b1bb32b2 100644 --- a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.bam md5sum: e6f1b343700b7ccb94e81ae127433988 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/mergemutectstats/test.yml b/tests/modules/gatk4/mergemutectstats/test.yml index 44d1c5f2..cc71854e 100644 --- a/tests/modules/gatk4/mergemutectstats/test.yml +++ b/tests/modules/gatk4/mergemutectstats/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.vcf.gz.stats md5sum: 17d2091015d04cbd4a26b7a67dc659e6 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/mergevcfs/main.nf b/tests/modules/gatk4/mergevcfs/main.nf index fa09d758..99a2158d 100644 --- a/tests/modules/gatk4/mergevcfs/main.nf +++ b/tests/modules/gatk4/mergevcfs/main.nf @@ -5,22 +5,22 @@ nextflow.enable.dsl = 2 include { GATK4_MERGEVCFS } from '../../../../modules/gatk4/mergevcfs/main.nf' workflow test_gatk4_mergevcfs { - input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true) ] - ] - dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - - GATK4_MERGEVCFS ( input, dict, false ) -} - -workflow test_gatk4_mergevcfs_refdict { def input = [] input = [ [ id:'test' ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true) ] ] + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) - GATK4_MERGEVCFS ( input, dict, true ) + GATK4_MERGEVCFS ( input, dict ) +} + +workflow test_gatk4_mergevcfs_no_dict { + input = [ [ id:'test' ], // meta map + [ 
file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test2_vcf'], checkIfExists: true) ] + ] + + GATK4_MERGEVCFS ( input, [] ) } diff --git a/tests/modules/gatk4/mergevcfs/test.yml b/tests/modules/gatk4/mergevcfs/test.yml index 3ff2bf93..da2f7578 100644 --- a/tests/modules/gatk4/mergevcfs/test.yml +++ b/tests/modules/gatk4/mergevcfs/test.yml @@ -6,12 +6,14 @@ files: - path: output/gatk4/test.vcf.gz md5sum: 5b289bda88d3a3504f2e19ee8cff177c + - path: output/gatk4/versions.yml -- name: gatk4 mergevcfs test_gatk4_mergevcfs_refdict - command: nextflow run ./tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_refdict -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config +- name: gatk4 mergevcfs test_gatk4_mergevcfs_no_dict + command: nextflow run ./tests/modules/gatk4/mergevcfs -entry test_gatk4_mergevcfs_no_dict -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergevcfs/nextflow.config tags: - gatk4/mergevcfs - gatk4 files: - path: output/gatk4/test.vcf.gz md5sum: 5b289bda88d3a3504f2e19ee8cff177c + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 19b22914..0b4339f0 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -2,22 +2,22 @@ nextflow.enable.dsl = 2 -include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' +include { GATK4_MUTECT2 } from '../../../../modules/gatk4/mutect2/main.nf' +include { GATK4_MUTECT2 as GATK4_MUTECT2_PAIR } from '../../../../modules/gatk4/mutect2/main.nf' +include { GATK4_MUTECT2 as GATK4_MUTECT2_MITO } from '../../../../modules/gatk4/mutect2/main.nf' +include { GATK4_MUTECT2 as GATK4_MUTECT2_F1R2 } from '../../../../modules/gatk4/mutect2/main.nf' workflow test_gatk4_mutect2_tumor_normal_pair { - input = [ [ id:'test'], // meta map + input = [ [ id:'test', normal_id:'normal', tumor_id:'tumour' ], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true) ], [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) ], - [], - ["normal"] + [] ] - run_single = false - run_pon = false - run_mito = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) @@ -26,19 +26,38 @@ workflow test_gatk4_mutect2_tumor_normal_pair { panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2_PAIR ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) +} + +workflow test_gatk4_mutect2_tumor_normal_pair_f1r2 { 
+ input = [ [ id:'test', normal_id:'normal', tumor_id:'tumour' ], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true) + ], + [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true) + ], + [] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) + + GATK4_MUTECT2_F1R2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_tumor_single { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], - [], [] ] - run_single = true - run_pon = false - run_mito = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) @@ -47,19 +66,16 @@ workflow test_gatk4_mutect2_tumor_single { panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_cram_input { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram'], checkIfExists: true)], [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true)], - [], [] ] - run_single = true - run_pon = false - run_mito = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) @@ -68,48 +84,37 @@ workflow 
test_gatk4_mutect2_cram_input { panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) - GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } workflow test_gatk4_mutect2_generate_pon { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], [ file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], - [], [] ] - run_single = false - run_pon = true - run_mito = false + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) - germline_resource = [] - germline_resource_tbi = [] - panel_of_normals = [] - panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2 ( input, fasta, fai, dict, [], [], [], [] ) } -// mitochondria mode would ideally have some mitochondria test data, but since the mitochondria settings only increase detection sensitivity, we can use the chr22 data as a stand in as it is already a small dataset, the extra variants detected compared to generate_pon shows the mode is working. 
+// mitochondria mode would ideally have some mitochondria test data +// but since the mitochondria settings only increase detection sensitivity +// we can use the chr22 data as a stand in as it is already a small dataset +// the extra variants detected compared to generate_pon shows the mode is working workflow test_gatk4_mutect2_mitochondria { input = [ [ id:'test'], // meta map [ file(params.test_data['homo_sapiens']['illumina']['mitochon_standin_recalibrated_sorted_bam'], checkIfExists: true)], [ file(params.test_data['homo_sapiens']['illumina']['mitochon_standin_recalibrated_sorted_bam_bai'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)], - [] + [ file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)] ] - run_single = false - run_pon = false - run_mito = true + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_dict'], checkIfExists: true) - germline_resource = [] - germline_resource_tbi = [] - panel_of_normals = [] - panel_of_normals_tbi = [] - GATK4_MUTECT2 ( input, run_single, run_pon, run_mito, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2_MITO ( input, fasta, fai, dict, [], [], [], [] ) } diff --git a/tests/modules/gatk4/mutect2/nextflow.config b/tests/modules/gatk4/mutect2/nextflow.config index 8730f1c4..3b5f6c62 100644 --- a/tests/modules/gatk4/mutect2/nextflow.config +++ b/tests/modules/gatk4/mutect2/nextflow.config @@ -2,4 +2,16 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: GATK4_MUTECT2_PAIR { + ext.args = { "--normal-sample $meta.normal_id" } + } + + withName: GATK4_MUTECT2_MITO { + ext.args = { "--mitochondria-mode" } + } + + withName: GATK4_MUTECT2_F1R2 { + ext.args = { "--normal-sample $meta.normal_id --f1r2-tar-gz ${meta.id}.f1r2.tar.gz" } + } + } diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index f8107e6d..3cefce09 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -1,5 +1,16 @@ - name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - path: output/gatk4/test.vcf.gz.stats + md5sum: 17d2091015d04cbd4a26b7a67dc659e6 + - path: output/gatk4/test.vcf.gz.tbi + +- name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair_f1r2 + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair_f1r2 -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config tags: - gatk4 - gatk4/mutect2 @@ -9,6 +20,7 @@ - path: output/gatk4/test.vcf.gz.stats md5sum: 17d2091015d04cbd4a26b7a67dc659e6 - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 mutect2 test_gatk4_mutect2_tumor_single command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config @@ -20,6 +32,7 @@ - path: output/gatk4/test.vcf.gz.stats 
md5sum: 55ed641e16089afb33cdbc478e202d3d - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 mutect2 test_gatk4_mutect2_cram_input command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_cram_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config @@ -31,6 +44,7 @@ - path: output/gatk4/test.vcf.gz.stats md5sum: 55ed641e16089afb33cdbc478e202d3d - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 mutect2 test_gatk4_mutect2_generate_pon command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_generate_pon -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config @@ -42,6 +56,7 @@ - path: output/gatk4/test.vcf.gz.stats md5sum: b569ce66bbffe9588b3d221e821023ee - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 mutect2 test_gatk4_mutect2_mitochondria command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_mitochondria -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config @@ -53,3 +68,4 @@ - path: output/gatk4/test.vcf.gz.stats md5sum: fc6ea14ca2da346babe78161beea28c9 - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index 4199b118..2ebdb685 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -6,3 +6,4 @@ files: - path: output/gatk4/test.reverted.bam md5sum: f783a88deb45c3a2c20ca12cbe1c5652 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/samtofastq/test.yml b/tests/modules/gatk4/samtofastq/test.yml index 66d3ee4c..eb25f33b 100644 --- a/tests/modules/gatk4/samtofastq/test.yml +++ b/tests/modules/gatk4/samtofastq/test.yml @@ -6,6 +6,7 @@ files: - path: output/gatk4/test.fastq.gz md5sum: 50ace41d4c24467f24f8b929540a7797 + - path: output/gatk4/versions.yml - name: gatk4 samtofastq test_gatk4_samtofastq_paired_end command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config @@ -17,3 +18,4 @@ md5sum: cfea607c9d75fd9ea9704780ad3a499c - path: output/gatk4/test_2.fastq.gz md5sum: 613bf64c023609e1c62ad6ce9e4be8d7 + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/selectvariants/test.yml b/tests/modules/gatk4/selectvariants/test.yml index 5bc32330..42d9dbd8 100644 --- a/tests/modules/gatk4/selectvariants/test.yml +++ b/tests/modules/gatk4/selectvariants/test.yml @@ -7,7 +7,6 @@ - path: output/gatk4/test.selectvariants.vcf.gz - path: output/gatk4/test.selectvariants.vcf.gz.tbi - path: output/gatk4/versions.yml - md5sum: a35d78af179f43652274bc7405d5a785 - name: gatk4 selectvariants test_gatk4_selectvariants_gz_input command: nextflow run tests/modules/gatk4/selectvariants -entry test_gatk4_selectvariants_gz_input -c tests/config/nextflow.config @@ -18,4 +17,3 @@ - path: output/gatk4/test.selectvariants.vcf.gz - path: output/gatk4/test.selectvariants.vcf.gz.tbi - path: output/gatk4/versions.yml - md5sum: c943f3579a369968ca63444eb43fb6e7 diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index 059d5e75..c38064e2 100644 --- a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -7,4 +7,3 @@ - path: output/gatk4/test.bam md5sum: 
ceed15c0bd64ff5c38d3816905933b0b - path: output/gatk4/versions.yml - md5sum: 27fceace2528a905ddca2b4db47c4bf5 diff --git a/tests/modules/gatk4/variantfiltration/test.yml b/tests/modules/gatk4/variantfiltration/test.yml index 068e8d63..0ab91091 100644 --- a/tests/modules/gatk4/variantfiltration/test.yml +++ b/tests/modules/gatk4/variantfiltration/test.yml @@ -10,6 +10,7 @@ "BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365", ] - path: output/gatk4/test.filtered.vcf.gz.tbi + - path: output/gatk4/versions.yml - name: gatk4 variantfiltration test_gatk4_variantfiltration_gz_input command: nextflow run ./tests/modules/gatk4/variantfiltration -entry test_gatk4_variantfiltration_gz_input -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/variantfiltration/nextflow.config @@ -23,3 +24,4 @@ "BaseQRankSum=-1.318;DP=17;ExcessHet=3.0103;MLEAC=1,0,0;MLEAF=0.500,0.00,0.00;MQRankSum=0.000;RAW_MQandDP=61200,17;ReadPosRankSum=2.365", ] - path: output/gatk4/test.filtered.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/variantrecalibrator/main.nf b/tests/modules/gatk4/variantrecalibrator/main.nf index be7004e7..66dde5dd 100644 --- a/tests/modules/gatk4/variantrecalibrator/main.nf +++ b/tests/modules/gatk4/variantrecalibrator/main.nf @@ -2,73 +2,67 @@ nextflow.enable.dsl = 2 -include { GATK4_VARIANTRECALIBRATOR as GATK4_VARIANTRECALIBRATOR_NO_ALLELESPECIFICTY } from '../../../../modules/gatk4/variantrecalibrator/main.nf' +include { GATK4_VARIANTRECALIBRATOR as GATK4_VARIANTRECALIBRATOR_NO_ALLELESPECIFICTY } from '../../../../modules/gatk4/variantrecalibrator/main.nf' include { GATK4_VARIANTRECALIBRATOR as GATK4_VARIANTRECALIBRATOR_WITH_ALLELESPECIFICTY } from '../../../../modules/gatk4/variantrecalibrator/main.nf' workflow test_gatk4_variantrecalibrator { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) ] + resources = [[ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + ], [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + ], [ + 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', + 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', + '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', + 
'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' + ]] + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) - resources = [ - [ - file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) - ], - [ - file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) - ], - [ - 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', - 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', - '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', - 'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' - ] - ] - GATK4_VARIANTRECALIBRATOR_NO_ALLELESPECIFICTY ( input, fasta, fai, dict, resources) + GATK4_VARIANTRECALIBRATOR_NO_ALLELESPECIFICTY(input, resources, fasta, fai, dict) } workflow test_gatk4_variantrecalibrator_allele_specific { input = [ [ id:'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_haplotc_ann_vcf_gz_tbi'], checkIfExists: true) ] + resources = [[ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) + ], [ + file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) + ], [ + 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', + 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', + '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', + 'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' + ]] + 
fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) - resources = [ - [ - file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz'], checkIfExists: true) - ], - [ - file(params.test_data['homo_sapiens']['genome']['hapmap_3_3_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_omni2_5_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['res_1000g_phase1_snps_hg38_21_vcf_gz_tbi'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['dbsnp_138_hg38_21_vcf_gz_tbi'], checkIfExists: true) - ], - [ - 'hapmap,known=false,training=true,truth=true,prior=15.0 hapmap_3.3.hg38.vcf.gz', - 'omni,known=false,training=true,truth=false,prior=12.0 1000G_omni2.5.hg38.vcf.gz', - '1000G,known=false,training=true,truth=false,prior=10.0 1000G_phase1.snps.hg38.vcf.gz', - 'dbsnp,known=true,training=false,truth=false,prior=2.0 dbsnp_138.hg38.vcf.gz' - ] - ] - GATK4_VARIANTRECALIBRATOR_WITH_ALLELESPECIFICTY ( input, fasta, fai, dict, resources) + GATK4_VARIANTRECALIBRATOR_WITH_ALLELESPECIFICTY(input, resources, fasta, fai, dict) } diff --git a/tests/modules/gatk4/variantrecalibrator/nextflow.config b/tests/modules/gatk4/variantrecalibrator/nextflow.config index 69be3b9c..6c3a9116 100644 --- a/tests/modules/gatk4/variantrecalibrator/nextflow.config +++ b/tests/modules/gatk4/variantrecalibrator/nextflow.config @@ -1,6 +1,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: GATK4_VARIANTRECALIBRATOR { ext.args = '--mode SNP -an QD -an MQ -an FS -an SOR' } diff --git a/tests/modules/gatk4/variantrecalibrator/test.yml b/tests/modules/gatk4/variantrecalibrator/test.yml index 42b18e36..bc84bfb3 100644 --- a/tests/modules/gatk4/variantrecalibrator/test.yml +++ b/tests/modules/gatk4/variantrecalibrator/test.yml @@ -10,6 +10,7 @@ - path: output/gatk4/test.recal.idx - path: output/gatk4/test.tranches md5sum: d238e97bf996863969dac7751e345549 + - path: output/gatk4/versions.yml - name: gatk4 variantrecalibrator test_gatk4_variantrecalibrator_allele_specific command: nextflow run tests/modules/gatk4/variantrecalibrator -entry test_gatk4_variantrecalibrator_allele_specific -c tests/config/nextflow.config -c ./tests/modules/gatk4/variantrecalibrator/nextflow.config @@ -23,3 +24,4 @@ - path: output/gatk4/test.recal.idx - path: output/gatk4/test.tranches md5sum: 444438d46716593634a6817958099292 + - path: output/gatk4/versions.yml From 0f7c04647747f535c94f63908bdd82c77f9a9ed1 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Wed, 13 Apr 2022 12:51:17 +0200 Subject: [PATCH 112/283] first commit --- modules/elprep/merge/main.nf | 44 ++++++++++++++++++++++ modules/elprep/merge/meta.yml | 44 ++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/elprep/merge/main.nf | 17 +++++++++ 
tests/modules/elprep/merge/nextflow.config | 5 +++ tests/modules/elprep/merge/test.yml | 11 ++++++ 6 files changed, 125 insertions(+) create mode 100644 modules/elprep/merge/main.nf create mode 100644 modules/elprep/merge/meta.yml create mode 100644 tests/modules/elprep/merge/main.nf create mode 100644 tests/modules/elprep/merge/nextflow.config create mode 100644 tests/modules/elprep/merge/test.yml diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf new file mode 100644 index 00000000..8312d35b --- /dev/null +++ b/modules/elprep/merge/main.nf @@ -0,0 +1,44 @@ +process ELPREP_MERGE { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::elprep=5.1.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/elprep:5.1.2--he881be0_0': + 'quay.io/biocontainers/elprep:5.1.2--he881be0_0' }" + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("**.{bam,sam}"), emit: bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + if (meta.single_end) { + args += " --single-end" + } + def suffix = args.contains("--output-type sam") ? "sam" : "bam" + + """ + # create directory and move all input so elprep can find and merge them before splitting + mkdir input + mv ${bam} input/ + + elprep merge \\ + input \\ + ${prefix}.${suffix} \\ + $args \\ + --nr-of-threads $task.cpus + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + elprep: \$(elprep 2>&1 | head -n2 | tail -n1 |sed 's/^.*version //;s/ compiled.*\$//') + END_VERSIONS + """ +} diff --git a/modules/elprep/merge/meta.yml b/modules/elprep/merge/meta.yml new file mode 100644 index 00000000..e157fddb --- /dev/null +++ b/modules/elprep/merge/meta.yml @@ -0,0 +1,44 @@ +name: "elprep_merge" +description: Merge split bam/sam chunks in one file +keywords: + - bam + - sam + - merge +tools: + - "elprep": + description: "elPrep is a high-performance tool for preparing .sam/.bam files for variant calling in sequencing pipelines. It can be used as a drop-in replacement for SAMtools/Picard/GATK4." + homepage: "None" + documentation: "None" + tool_dev_url: "None" + doi: "" + licence: "['AGPL v3']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: List of BAM/SAM chunks to merge + pattern: "*.{bam,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + # + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Merged BAM/SAM file + pattern: "*.{bam,sam}" + +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index cd4913cf..ea22a0d6 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -587,6 +587,10 @@ ectyper: - modules/ectyper/** - tests/modules/ectyper/** +elprep/merge: + - modules/elprep/merge/** + - tests/modules/elprep/merge/** + emmtyper: - modules/emmtyper/** - tests/modules/emmtyper/** diff --git a/tests/modules/elprep/merge/main.nf b/tests/modules/elprep/merge/main.nf new file mode 100644 index 00000000..b4a40ce3 --- /dev/null +++ b/tests/modules/elprep/merge/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ELPREP_SPLIT } from '../../../../modules/elprep/split/main.nf' +include { ELPREP_MERGE } from '../../../../modules/elprep/merge/main.nf' + +workflow test_elprep_merge { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + + ELPREP_SPLIT ( input ) + ELPREP_MERGE ( ELPREP_SPLIT.out.bam ) +} diff --git a/tests/modules/elprep/merge/nextflow.config b/tests/modules/elprep/merge/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/elprep/merge/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/elprep/merge/test.yml b/tests/modules/elprep/merge/test.yml new file mode 100644 index 00000000..c66fe030 --- /dev/null +++ b/tests/modules/elprep/merge/test.yml @@ -0,0 +1,11 @@ +- name: "elprep merge" + command: nextflow run ./tests/modules/elprep/merge -entry test_elprep_merge -c ./tests/config/nextflow.config -c ./tests/modules/elprep/merge/nextflow.config + tags: + - "elprep" + # + - "elprep/merge" + # + files: + - path: "output/elprep/test.bam" + md5sum: e667c7caad0bc4b7ac383fd023c654fc + - path: output/elprep/versions.yml From 39b4b45ba0ed07432f39c3912a4ca615e782db5f Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Wed, 13 Apr 2022 13:31:32 +0200 Subject: [PATCH 113/283] Update test_data.config to include kaiju.tar.gz (#1520) --- tests/config/test_data.config | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 1a5c377c..97d22981 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -28,9 +28,8 @@ params { kraken2_bracken = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken" kraken2_bracken_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kraken2_bracken.tar.gz" - kaiju_fmi = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/proteins.fmi" - kaiju_nodes = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/nodes.dmp" - kaiju_names = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju/names.dmp" + kaiju = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju" + kaiju_tar_gz = "${test_data_dir}/genomics/sarscov2/genome/db/kaiju.tar.gz" ncbi_taxmap_zip = "${test_data_dir}/genomics/sarscov2/genome/db/maltextract/ncbi_taxmap.zip" taxon_list_txt = 
"${test_data_dir}/genomics/sarscov2/genome/db/maltextract/taxon_list.txt" From 4f5274c3de0c9521f5033893ff61057a74c45ba9 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Wed, 13 Apr 2022 14:30:34 +0200 Subject: [PATCH 114/283] feat: remove read_group which should be set up with task.ext.args (#1521) * feat: remove reat_group which should be set up with ext.args * fix: simplify dragmap command --- modules/bwa/mem/main.nf | 2 -- modules/bwamem2/mem/main.nf | 2 -- modules/dragmap/align/main.nf | 53 +++++++++++------------------------ 3 files changed, 16 insertions(+), 41 deletions(-) diff --git a/modules/bwa/mem/main.nf b/modules/bwa/mem/main.nf index ffa51908..f55af944 100644 --- a/modules/bwa/mem/main.nf +++ b/modules/bwa/mem/main.nf @@ -23,14 +23,12 @@ process BWA_MEM { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def read_group = meta.read_group ? "-R ${meta.read_group}" : "" def samtools_command = sort_bam ? 'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` bwa mem \\ $args \\ - $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ diff --git a/modules/bwamem2/mem/main.nf b/modules/bwamem2/mem/main.nf index 50d84cb0..978c4019 100644 --- a/modules/bwamem2/mem/main.nf +++ b/modules/bwamem2/mem/main.nf @@ -23,7 +23,6 @@ process BWAMEM2_MEM { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def read_group = meta.read_group ? "-R ${meta.read_group}" : "" def samtools_command = sort_bam ? 'sort' : 'view' """ INDEX=`find -L ./ -name "*.amb" | sed 's/.amb//'` @@ -31,7 +30,6 @@ process BWAMEM2_MEM { bwa-mem2 \\ mem \\ $args \\ - $read_group \\ -t $task.cpus \\ \$INDEX \\ $reads \\ diff --git a/modules/dragmap/align/main.nf b/modules/dragmap/align/main.nf index b7f1e33b..f0d59f05 100644 --- a/modules/dragmap/align/main.nf +++ b/modules/dragmap/align/main.nf @@ -24,44 +24,23 @@ process DRAGMAP_ALIGN { def args = task.ext.args ?: '' def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def read_group = meta.read_group ? "--RGSM ${meta.read_group}" : "" + def reads_command = meta.single_end ? "-1 $reads" : "-1 ${reads[0]} -2 ${reads[1]}" def samtools_command = sort_bam ? 
'sort' : 'view' - if (meta.single_end) { - """ - dragen-os \\ - -r $hashmap \\ - $args \\ - $read_group \\ - --num-threads $task.cpus \\ - -1 $reads \\ - 2> ${prefix}.dragmap.log \\ - | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - dragmap: \$(echo \$(dragen-os --version 2>&1)) - samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') - pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) - END_VERSIONS - """ - } else { - """ - dragen-os \\ - -r $hashmap \\ - $args \\ - $read_group \\ - --num-threads $task.cpus \\ - -1 ${reads[0]} \\ - -2 ${reads[1]} \\ - 2> ${prefix}.dragmap.log \\ - | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - + """ + dragen-os \\ + -r $hashmap \\ + $args \\ + --num-threads $task.cpus \\ + $reads_command \\ + 2> ${prefix}.dragmap.log \\ + | samtools $samtools_command $args2 --threads $task.cpus -o ${prefix}.bam - - cat <<-END_VERSIONS > versions.yml - "${task.process}": - dragmap: \$(echo \$(dragen-os --version 2>&1)) - samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') - pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) - END_VERSIONS - """ - } + cat <<-END_VERSIONS > versions.yml + "${task.process}": + dragmap: \$(echo \$(dragen-os --version 2>&1)) + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + pigz: \$( pigz --version 2>&1 | sed 's/pigz //g' ) + END_VERSIONS + """ } From 8856f127c58f6af479128be8b8df4d42e442ddbe Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Wed, 13 Apr 2022 14:53:08 +0200 Subject: [PATCH 115/283] Update input in kaiju_kaiju module (#1522) --- modules/kaiju/kaiju/main.nf | 10 ++++++---- modules/kaiju/kaiju/meta.yml | 1 + tests/modules/kaiju/kaiju/main.nf | 18 ++++++++---------- 3 files changed, 15 insertions(+), 14 deletions(-) diff --git a/modules/kaiju/kaiju/main.nf b/modules/kaiju/kaiju/main.nf index 4050ede5..ae8f99e6 100644 --- a/modules/kaiju/kaiju/main.nf +++ b/modules/kaiju/kaiju/main.nf @@ -9,11 +9,11 @@ process KAIJU_KAIJU { input: tuple val(meta), path(reads) - tuple path(db), path(dbnodes) + path(db) output: tuple val(meta), path('*.tsv'), emit: results - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -23,11 +23,13 @@ process KAIJU_KAIJU { def prefix = task.ext.prefix ?: "${meta.id}" def input = meta.single_end ? 
"-i ${reads}" : "-i ${reads[0]} -j ${reads[1]}" """ + dbnodes=`find -L ${db} -name "*nodes.dmp"` + dbname=`find -L ${db} -name "*.fmi" -not -name "._*"` kaiju \\ $args \\ -z $task.cpus \\ - -t ${dbnodes} \\ - -f ${db} \\ + -t \$dbnodes \\ + -f \$dbname \\ -o ${prefix}.tsv \\ $input diff --git a/modules/kaiju/kaiju/meta.yml b/modules/kaiju/kaiju/meta.yml index 69a74037..e24c8efc 100644 --- a/modules/kaiju/kaiju/meta.yml +++ b/modules/kaiju/kaiju/meta.yml @@ -50,3 +50,4 @@ output: authors: - "@talnor" - "@sofstam" + - "@jfy133" diff --git a/tests/modules/kaiju/kaiju/main.nf b/tests/modules/kaiju/kaiju/main.nf index 00da82a9..10849ff8 100644 --- a/tests/modules/kaiju/kaiju/main.nf +++ b/tests/modules/kaiju/kaiju/main.nf @@ -2,6 +2,7 @@ nextflow.enable.dsl = 2 +include { UNTAR } from '../../../../modules/untar/main.nf' include { KAIJU_KAIJU } from '../../../../modules/kaiju/kaiju/main.nf' workflow test_kaiju_kaiju_single_end { @@ -10,12 +11,10 @@ workflow test_kaiju_kaiju_single_end { [ id:'test', single_end:true ], // meta map file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] - db = [ - file(params.test_data['sarscov2']['genome']['kaiju_fmi'], checkIfExists: true), // database - file(params.test_data['sarscov2']['genome']['kaiju_nodes'], checkIfExists: true) // taxon nodes - ] + db = [ [], file(params.test_data['sarscov2']['genome']['kaiju_tar_gz'], checkIfExists: true) ] - KAIJU_KAIJU ( input, db ) + UNTAR ( db ) + KAIJU_KAIJU ( input, UNTAR.out.untar.map{ it[1] } ) } workflow test_kaiju_kaiju_paired_end { @@ -25,10 +24,9 @@ workflow test_kaiju_kaiju_paired_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] - db = [ - file(params.test_data['sarscov2']['genome']['kaiju_fmi'], checkIfExists: true), // database - file(params.test_data['sarscov2']['genome']['kaiju_nodes'], checkIfExists: true) // taxon nodes - ] + db = [ [], file(params.test_data['sarscov2']['genome']['kaiju_tar_gz'], checkIfExists: true) ] + + UNTAR ( db ) + KAIJU_KAIJU ( input, UNTAR.out.untar.map{ it[1] } ) - KAIJU_KAIJU ( input, db ) } From e04970b7d249365cafa5a52912f9a28840481c05 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 13 Apr 2022 15:15:44 +0200 Subject: [PATCH 116/283] typo in command line (#1523) --- modules/gatk4/markduplicatesspark/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gatk4/markduplicatesspark/main.nf b/modules/gatk4/markduplicatesspark/main.nf index 01a19e5c..77e135db 100644 --- a/modules/gatk4/markduplicatesspark/main.nf +++ b/modules/gatk4/markduplicatesspark/main.nf @@ -23,7 +23,7 @@ process GATK4_MARKDUPLICATES_SPARK { script: def args = task.ext.args ?: '' prefix = task.ext.prefix ?: "${meta.id}" - def input_list = bam.collect{"--INPUT $it"}.join(' ') + def input_list = bam.collect{"--input $it"}.join(' ') def avail_mem = 3 if (!task.memory) { From be4ae28c3c95b3c4047a7d9fb4cb0ed749631cea Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Wed, 13 Apr 2022 15:33:51 +0200 Subject: [PATCH 117/283] Add centrifuge_kreport module (#1514) --- modules/centrifuge/kreport/main.nf | 33 +++++++++++++++ modules/centrifuge/kreport/meta.yml | 41 +++++++++++++++++++ tests/modules/centrifuge/centrifuge/main.nf | 1 - tests/modules/centrifuge/kreport/main.nf | 32 +++++++++++++++ .../centrifuge/kreport/nextflow.config | 5 +++ tests/modules/centrifuge/kreport/test.yml | 21 ++++++++++ 6 files changed, 132 insertions(+), 1 deletion(-) create mode 100644 modules/centrifuge/kreport/main.nf create mode 100644 modules/centrifuge/kreport/meta.yml create mode 100644 tests/modules/centrifuge/kreport/main.nf create mode 100644 tests/modules/centrifuge/kreport/nextflow.config create mode 100644 tests/modules/centrifuge/kreport/test.yml diff --git a/modules/centrifuge/kreport/main.nf b/modules/centrifuge/kreport/main.nf new file mode 100644 index 00000000..124cbdba --- /dev/null +++ b/modules/centrifuge/kreport/main.nf @@ -0,0 +1,33 @@ +process CENTRIFUGE_KREPORT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::centrifuge=1.0.4_beta" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/centrifuge:1.0.4_beta--h9a82719_6': + 'quay.io/biocontainers/centrifuge:1.0.4_beta--h9a82719_6' }" + + input: + tuple val(meta), path(results) + path db + + output: + tuple val(meta), path('*.txt') , emit: kreport + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + db_name=`find -L ${db} -name "*.1.cf" -not -name "._*" | sed 's/.1.cf//'` + centrifuge-kreport -x \$db_name ${results} > ${prefix}.txt + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + centrifuge: \$( centrifuge --version | sed -n 1p | sed 's/^.*centrifuge-class version //') + END_VERSIONS + """ +} diff --git a/modules/centrifuge/kreport/meta.yml b/modules/centrifuge/kreport/meta.yml new file mode 100644 index 00000000..fbcae24f --- /dev/null +++ b/modules/centrifuge/kreport/meta.yml @@ -0,0 +1,41 @@ +name: "centrifuge_kreport" +description: Creates Kraken-style reports from centrifuge out files +keywords: + - metagenomics +tools: + - centrifuge: + description: Centrifuge is a classifier for metagenomic sequences. + homepage: https://ccb.jhu.edu/software/centrifuge/ + documentation: https://ccb.jhu.edu/software/centrifuge/manual.shtml + doi: 10.1101/gr.210641.116 + licence: ["GPL v3"] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - results: + type: file + description: File containing the centrifuge classification results + pattern: "*.{txt}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - kreport: + type: file + description: | + File containing kraken-style report from centrifuge + out files. + pattern: "*.{txt}" +authors: + - "@sofstam" + - "@jfy133" diff --git a/tests/modules/centrifuge/centrifuge/main.nf b/tests/modules/centrifuge/centrifuge/main.nf index 7e44bd80..35deeb58 100644 --- a/tests/modules/centrifuge/centrifuge/main.nf +++ b/tests/modules/centrifuge/centrifuge/main.nf @@ -25,7 +25,6 @@ workflow test_centrifuge_centrifuge_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] - //db_name = "minigut_cf" save_unaligned = true save_aligned = false sam_format = false diff --git a/tests/modules/centrifuge/kreport/main.nf b/tests/modules/centrifuge/kreport/main.nf new file mode 100644 index 00000000..397d33aa --- /dev/null +++ b/tests/modules/centrifuge/kreport/main.nf @@ -0,0 +1,32 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { CENTRIFUGE_CENTRIFUGE } from '../../../../modules/centrifuge/centrifuge/main.nf' +include { CENTRIFUGE_KREPORT } from '../../../../modules/centrifuge/kreport/main.nf' + +workflow test_centrifuge_kreport_single_end { + + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] + + ch_db = UNTAR ( db ) + CENTRIFUGE_CENTRIFUGE ( input, ch_db.untar.map{ it[1] }, false, false, false ) + CENTRIFUGE_KREPORT ( CENTRIFUGE_CENTRIFUGE.out.results, ch_db.untar.map{ it[1] } ) +} + +workflow test_centrifuge_kreport_paired_end { + input = [ [ id:'test', single_end:false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] + ] + db = [ [], file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/minigut_cf.tar.gz', checkIfExists: true) ] + + ch_db = UNTAR ( db ) + CENTRIFUGE_CENTRIFUGE ( input, ch_db.untar.map{ it[1] }, false, false, false ) + CENTRIFUGE_KREPORT ( CENTRIFUGE_CENTRIFUGE.out.results, ch_db.untar.map{ it[1] } ) +} + diff --git a/tests/modules/centrifuge/kreport/nextflow.config b/tests/modules/centrifuge/kreport/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/centrifuge/kreport/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/centrifuge/kreport/test.yml b/tests/modules/centrifuge/kreport/test.yml new file mode 100644 index 00000000..167cb0dc --- /dev/null +++ b/tests/modules/centrifuge/kreport/test.yml @@ -0,0 +1,21 @@ +- name: centrifuge kreport test_centrifuge_kreport_single_end + command: nextflow 
run tests/modules/centrifuge/kreport -entry test_centrifuge_kreport_single_end -c tests/config/nextflow.config + tags: + - centrifuge + - centrifuge/kreport + files: + - path: output/centrifuge/test.txt + md5sum: af1a51fe57eb6d428350ff4a4bf759d4 + contains: ["unclassified"] + - path: output/centrifuge/versions.yml + +- name: centrifuge kreport test_centrifuge_kreport_paired_end + command: nextflow run tests/modules/centrifuge/kreport -entry test_centrifuge_kreport_paired_end -c tests/config/nextflow.config + tags: + - centrifuge + - centrifuge/kreport + files: + - path: output/centrifuge/test.txt + md5sum: af1a51fe57eb6d428350ff4a4bf759d4 + contains: ["unclassified"] + - path: output/centrifuge/versions.yml From c7def8707bccdbafa54c60be275dcb045e907dd8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20H=C3=B6rtenhuber?= Date: Thu, 14 Apr 2022 13:35:01 +0200 Subject: [PATCH 118/283] fix incorrect yaml formatting. (#1264) --- modules/allelecounter/meta.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/allelecounter/meta.yml b/modules/allelecounter/meta.yml index 7d921e12..0734512e 100644 --- a/modules/allelecounter/meta.yml +++ b/modules/allelecounter/meta.yml @@ -32,8 +32,8 @@ input: description: loci file pattern: "*.{tsv}" - fasta: - type: file - description: Input genome fasta file. Required when passing CRAM files. + type: file + description: Input genome fasta file. Required when passing CRAM files. output: - meta: From a6cb75174bfbd131f3da14d8cd3e34c1a2e6e268 Mon Sep 17 00:00:00 2001 From: Anders Jemt Date: Tue, 19 Apr 2022 10:53:39 +0200 Subject: [PATCH 119/283] Add variant catalog ch to stranger (#1508) * Add optional variant catalog input * fix for no variant catalog test Co-authored-by: ljmesi <37740329+ljmesi@users.noreply.github.com> Co-authored-by: Lauri Mesilaakso --- modules/stranger/main.nf | 5 ++++- modules/stranger/meta.yml | 4 ++++ tests/modules/stranger/main.nf | 24 +++++++++++++++--------- tests/modules/stranger/test.yml | 16 +++++++++++++++- 4 files changed, 38 insertions(+), 11 deletions(-) diff --git a/modules/stranger/main.nf b/modules/stranger/main.nf index 2e647627..55678bd3 100644 --- a/modules/stranger/main.nf +++ b/modules/stranger/main.nf @@ -9,6 +9,7 @@ process STRANGER { input: tuple val(meta), path(vcf) + path variant_catalog output: tuple val(meta), path("*.gz"), emit: vcf @@ -20,10 +21,12 @@ process STRANGER { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def options_variant_catalog = variant_catalog ? 
"--repeats-file $variant_catalog" : "" """ stranger \\ $args \\ - $vcf | gzip --no-name > ${prefix}.vcf.gz + $vcf \\ + $options_variant_catalog | gzip --no-name > ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/stranger/meta.yml b/modules/stranger/meta.yml index 61ebc7a9..0707d806 100644 --- a/modules/stranger/meta.yml +++ b/modules/stranger/meta.yml @@ -24,6 +24,10 @@ input: type: file description: VCF with repeat expansions pattern: "*.{vcf.gz,vcf}" + - variant_catalog: + type: file + description: json file with repeat expansion sites to genotype + pattern: "*.{json}" output: - meta: diff --git a/tests/modules/stranger/main.nf b/tests/modules/stranger/main.nf index bc4bd3ce..5bd6766b 100644 --- a/tests/modules/stranger/main.nf +++ b/tests/modules/stranger/main.nf @@ -5,15 +5,21 @@ nextflow.enable.dsl = 2 include { EXPANSIONHUNTER } from '../../../modules/expansionhunter/main.nf' include { STRANGER } from '../../../modules/stranger/main.nf' + +input = [ [ id:'test', gender:'male' ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + ] +fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) +variant_catalog = file(params.test_data['homo_sapiens']['genome']['repeat_expansions'], checkIfExists: true) + + workflow test_stranger { - - input = [ [ id:'test', gender:'male' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), - ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) - variant_catalog = file(params.test_data['homo_sapiens']['genome']['repeat_expansions'], checkIfExists: true) - EXPANSIONHUNTER ( input, fasta, variant_catalog ) - STRANGER ( EXPANSIONHUNTER.out.vcf ) + STRANGER ( EXPANSIONHUNTER.out.vcf, variant_catalog ) +} + +workflow test_stranger_without_optional_variant_catalog { + EXPANSIONHUNTER ( input, fasta, variant_catalog ) + STRANGER ( EXPANSIONHUNTER.out.vcf, [] ) } diff --git a/tests/modules/stranger/test.yml b/tests/modules/stranger/test.yml index 821928e8..c7a6972e 100644 --- a/tests/modules/stranger/test.yml +++ b/tests/modules/stranger/test.yml @@ -8,6 +8,20 @@ - path: output/expansionhunter/versions.yml md5sum: f3962a6eecfddf9682414c0f605a885a - path: output/stranger/test.vcf.gz - md5sum: bbe15159195681d5c18596d3ad85c78f + md5sum: 68b0ca1319851134ffa8793a4704dc11 - path: output/stranger/versions.yml md5sum: 5ec35fd835fb1be50bc3e7c004310fc0 + +- name: stranger test_stranger_without_optional_variant_catalog + command: nextflow run tests/modules/stranger -entry test_stranger_without_optional_variant_catalog -c tests/config/nextflow.config + tags: + - stranger + files: + - path: output/expansionhunter/test.vcf + md5sum: cfd4a1d35c0e469b99eb6aaa6d22de76 + - path: output/expansionhunter/versions.yml + md5sum: c95af9e6d8cd9bd2ce1090ca4e7a6020 + - path: output/stranger/test.vcf.gz + md5sum: bbe15159195681d5c18596d3ad85c78f + - path: output/stranger/versions.yml + md5sum: 8558542a007e90ea5dcdceed3f12585d From ccef7f857917189fe1c2563c4d68deb038bade4b Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 19 Apr 2022 11:26:01 +0200 Subject: [PATCH 120/283] 
alignment --- modules/elprep/merge/main.nf | 4 ++-- tests/modules/elprep/merge/test.yml | 3 --- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf index 8312d35b..010156f2 100644 --- a/modules/elprep/merge/main.nf +++ b/modules/elprep/merge/main.nf @@ -11,8 +11,8 @@ process ELPREP_MERGE { tuple val(meta), path(bam) output: - tuple val(meta), path("**.{bam,sam}"), emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("**.{bam,sam}") , emit: bam + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when diff --git a/tests/modules/elprep/merge/test.yml b/tests/modules/elprep/merge/test.yml index c66fe030..26c16f59 100644 --- a/tests/modules/elprep/merge/test.yml +++ b/tests/modules/elprep/merge/test.yml @@ -2,10 +2,7 @@ command: nextflow run ./tests/modules/elprep/merge -entry test_elprep_merge -c ./tests/config/nextflow.config -c ./tests/modules/elprep/merge/nextflow.config tags: - "elprep" - # - "elprep/merge" - # files: - path: "output/elprep/test.bam" - md5sum: e667c7caad0bc4b7ac383fd023c654fc - path: output/elprep/versions.yml From ad15d1b7922c8de1b4b84897e4a44182f5b52299 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 19 Apr 2022 11:27:55 +0200 Subject: [PATCH 121/283] update label --- modules/elprep/merge/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf index 010156f2..00ed4d60 100644 --- a/modules/elprep/merge/main.nf +++ b/modules/elprep/merge/main.nf @@ -1,6 +1,6 @@ process ELPREP_MERGE { tag "$meta.id" - label 'process_medium' + label 'process_low' conda (params.enable_conda ? "bioconda::elprep=5.1.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? From 6723ca7f9068ecbdee31dcad07f3115dc5b40667 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 19 Apr 2022 15:05:02 +0200 Subject: [PATCH 122/283] bump version, add merge before split --- modules/bamtools/split/main.nf | 22 +++++++++++++++------- modules/bamtools/split/meta.yml | 8 +++++++- 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 014e5cdb..f6382d97 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -2,13 +2,14 @@ process BAMTOOLS_SPLIT { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::bamtools=2.5.1" : null) + conda (params.enable_conda ? "bioconda::bamtools=2.5.2" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/bamtools:2.5.1--h9a82719_9' : - 'quay.io/biocontainers/bamtools:2.5.1--h9a82719_9' }" + 'https://depot.galaxyproject.org/singularity/bamtools:2.5.2--hd03093a_0' : + 'quay.io/biocontainers/bamtools:2.5.2--hd03093a_0' }" input: tuple val(meta), path(bam) + path(bam_list) output: tuple val(meta), path("*.bam"), emit: bam @@ -19,12 +20,19 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' + def args2 = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def input_list = bam.collect{"-in $it"}.join(' ') + if (bam_list) { + input += " -list $bam_list" + } + """ - bamtools \\ - split \\ - -in $bam \\ - $args + bamtools merge \\ + ${input_list} \\ + $args \\ + bamtools split \\ + $args2 cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bamtools/split/meta.yml b/modules/bamtools/split/meta.yml index 0e848212..6c534281 100644 --- a/modules/bamtools/split/meta.yml +++ b/modules/bamtools/split/meta.yml @@ -23,8 +23,13 @@ input: e.g. [ id:'test', single_end:false ] - bam: type: file - description: A BAM file to split + description: A list of BAM files to merge and then split pattern: "*.bam" + - bam_list: + type: file + description: | + Input file containing bam files to merge before splitting, + one line per file output: - meta: @@ -43,3 +48,4 @@ output: authors: - "@sguizard" + - "@matthdsm" From b35264e9196d8f7c74c28a7bd5bc126a5e16afe8 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 19 Apr 2022 15:11:37 +0200 Subject: [PATCH 123/283] bugfixes --- modules/bamtools/split/main.nf | 6 ++---- modules/bamtools/split/meta.yml | 2 +- tests/modules/bamtools/split/main.nf | 2 +- 3 files changed, 4 insertions(+), 6 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index f6382d97..2f5ef09f 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -20,7 +20,6 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' - def args2 = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def input_list = bam.collect{"-in $it"}.join(' ') if (bam_list) { @@ -30,9 +29,8 @@ process BAMTOOLS_SPLIT { """ bamtools merge \\ ${input_list} \\ - $args \\ - bamtools split \\ - $args2 + | bamtools split \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/bamtools/split/meta.yml b/modules/bamtools/split/meta.yml index 6c534281..0fd5d5ca 100644 --- a/modules/bamtools/split/meta.yml +++ b/modules/bamtools/split/meta.yml @@ -28,7 +28,7 @@ input: - bam_list: type: file description: | - Input file containing bam files to merge before splitting, + Optional input file containing bam files to merge before splitting, one line per file output: diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index eb0bed01..9f30c4f9 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -10,5 +10,5 @@ workflow test_bamtools_split { [ id:'test', single_end:false ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] - BAMTOOLS_SPLIT ( input ) + BAMTOOLS_SPLIT ( input, [] ) } From 07982d6cb13649c308625bb3d215c39979e1207c Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Tue, 19 Apr 2022 15:31:39 +0200 Subject: [PATCH 124/283] fix command line, fix meta.yml --- modules/bamtools/split/main.nf | 5 
++++- tests/modules/bamtools/split/test.yml | 9 +++++---- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 2f5ef09f..f72d13be 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -23,7 +23,10 @@ process BAMTOOLS_SPLIT { def prefix = task.ext.prefix ?: "${meta.id}" def input_list = bam.collect{"-in $it"}.join(' ') if (bam_list) { - input += " -list $bam_list" + input_list += " -list $bam_list" + } + if (!args.contains("-stub")) { + args += " -stub ${prefix}" } """ diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index 4f52e9ce..b52cc9ee 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,10 +1,11 @@ - name: bamtools split test_bamtools_split - command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config + command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config tags: - - bamtools/split - bamtools + - bamtools/split files: - - path: output/bamtools/test.paired_end.sorted.REF_chr22.bam + - path: output/bamtools/test.REF_chr22.bam md5sum: b7dc50e0edf9c6bfc2e3b0e6d074dc07 - - path: output/bamtools/test.paired_end.sorted.REF_unmapped.bam + - path: output/bamtools/test.REF_unmapped.bam md5sum: e0754bf72c51543b2d745d96537035fb + - path: output/bamtools/versions.yml From 7630e278f3aa9abe1c1b78bac783cf9db2e66c9d Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Tue, 19 Apr 2022 16:51:59 +0200 Subject: [PATCH 125/283] update gatk4 yml for spark (#1538) --- tests/config/pytest_modules.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index cd4913cf..bbd47fe5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -659,6 +659,10 @@ gatk4/applybqsr: - modules/gatk4/applybqsr/** - tests/modules/gatk4/applybqsr/** +gatk4/applybqsrspark: + - modules/gatk4/applybqsrspark/** + - tests/modules/gatk4/applybqsrspark/** + gatk4/applyvqsr: - modules/gatk4/applyvqsr/** - tests/modules/gatk4/applyvqsr/** @@ -667,6 +671,10 @@ gatk4/baserecalibrator: - modules/gatk4/baserecalibrator/** - tests/modules/gatk4/baserecalibrator/** +gatk4/baserecalibratorspark: + - modules/gatk4/baserecalibratorspark/** + - tests/modules/gatk4/baserecalibratorspark/** + gatk4/bedtointervallist: - modules/gatk4/bedtointervallist/** - tests/modules/gatk4/bedtointervallist/** @@ -743,6 +751,10 @@ gatk4/markduplicates: - modules/gatk4/markduplicates/** - tests/modules/gatk4/markduplicates/** +gatk4/markduplicatesspark: + - modules/gatk4/markduplicatesspark/** + - tests/modules/gatk4/markduplicatesspark/** + gatk4/mergebamalignment: - modules/gatk4/mergebamalignment/** - tests/modules/gatk4/mergebamalignment/** From 705f8c9ac4dfdf07666e71abde28f267e2dfd5eb Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 20 Apr 2022 10:05:17 +0200 Subject: [PATCH 126/283] Add samtools/collatefastq module (#1536) * add samtools/collatefastq module * update yml file * improve output --- modules/samtools/collatefastq/main.nf | 47 ++++++++++++++++++ modules/samtools/collatefastq/meta.yml | 48 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/collatefastq/main.nf | 13 +++++ .../samtools/collatefastq/nextflow.config | 5 ++ tests/modules/samtools/collatefastq/test.yml | 14 ++++++ 6 files changed, 131 insertions(+) create mode 100644 modules/samtools/collatefastq/main.nf create mode 100644 modules/samtools/collatefastq/meta.yml create mode 100644 tests/modules/samtools/collatefastq/main.nf create mode 100644 tests/modules/samtools/collatefastq/nextflow.config create mode 100644 tests/modules/samtools/collatefastq/test.yml diff --git a/modules/samtools/collatefastq/main.nf b/modules/samtools/collatefastq/main.nf new file mode 100644 index 00000000..3d9becda --- /dev/null +++ b/modules/samtools/collatefastq/main.nf @@ -0,0 +1,47 @@ +process SAMTOOLS_COLLATEFASTQ { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" + + input: + tuple val(meta), path(input) + + output: + //TODO might be good to have ordered output of the fastq files, so we can + // make sure the we get the right files + tuple val(meta), path("*_{1,2}.fq.gz"), path("*_other.fq.gz"), path("*_singleton.fq.gz"), emit: reads + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def args2 = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + samtools collate \\ + $args \\ + --threads $task.cpus \\ + -O \\ + $input \\ + . | + + samtools fastq \\ + $args2 \\ + --threads $task.cpus \\ + -1 ${prefix}_1.fq.gz \\ + -2 ${prefix}_2.fq.gz \\ + -0 ${prefix}_other.fq.gz \\ + -s ${prefix}_singleton.fq.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samtools/collatefastq/meta.yml b/modules/samtools/collatefastq/meta.yml new file mode 100644 index 00000000..d3a2e3af --- /dev/null +++ b/modules/samtools/collatefastq/meta.yml @@ -0,0 +1,48 @@ +name: samtools_collatefastq +description: | + The module uses collate and then fastq methods from samtools to + convert a SAM, BAM or CRAM file to FASTQ format +keywords: + - bam2fq + - samtools + - fastq +tools: + - samtools: + description: Tools for dealing with SAM, BAM and CRAM files + homepage: None + documentation: http://www.htslib.org/doc/1.1/samtools.html + tool_dev_url: None + doi: "" + licence: ["MIT"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - reads: + type: file + description: | + FASTQ files, which will be either a group of 4 files (read_1, read_2, other and singleton) + or a single interleaved .fq.gz file if the user chooses not to split the reads. + pattern: "*.fq.gz" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + +authors: + - "@lescai" + - "@maxulysse" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index bbd47fe5..607981a0 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1567,6 +1567,10 @@ samtools/bam2fq: - modules/samtools/bam2fq/** - tests/modules/samtools/bam2fq/** +samtools/collatefastq: + - modules/samtools/collatefastq/** + - tests/modules/samtools/collatefastq/** + samtools/depth: - modules/samtools/depth/** - tests/modules/samtools/depth/** diff --git a/tests/modules/samtools/collatefastq/main.nf b/tests/modules/samtools/collatefastq/main.nf new file mode 100644 index 00000000..928742ac --- /dev/null +++ b/tests/modules/samtools/collatefastq/main.nf @@ -0,0 +1,13 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_COLLATEFASTQ } from '../../../../modules/samtools/collatefastq/main.nf' + +workflow test_samtools_collatefastq { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + SAMTOOLS_COLLATEFASTQ ( input ) +} diff --git a/tests/modules/samtools/collatefastq/nextflow.config b/tests/modules/samtools/collatefastq/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/collatefastq/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/collatefastq/test.yml b/tests/modules/samtools/collatefastq/test.yml new file mode 100644 index 00000000..2b001885 --- /dev/null +++ b/tests/modules/samtools/collatefastq/test.yml @@ -0,0 +1,14 @@ +- name: samtools fastq test_samtools_collatefastq + command: nextflow run ./tests/modules/samtools/collatefastq -entry test_samtools_collatefastq -c ./tests/config/nextflow.config -c ./tests/modules/samtools/collatefastq/nextflow.config + tags: + - samtools + - samtools/collatefastq + files: + - path: output/samtools/test_1.fq.gz + md5sum: 829732de4e937edca90f27b07e5b501a + - path: output/samtools/test_2.fq.gz + md5sum: ef27d3809e495620fd93df894280c03a + - path: output/samtools/test_other.fq.gz + md5sum: 709872fc2910431b1e8b7074bfe38c67 + - path: output/samtools/test_singleton.fq.gz + md5sum: 709872fc2910431b1e8b7074bfe38c67 From 6c45773c0b7f4bdaa8c9716091b0e721a5ae96f3 Mon Sep 17 00:00:00 2001 From: louperelo <44900284+louperelo@users.noreply.github.com> Date: Wed, 20 Apr 2022 11:53:26 +0200 Subject: [PATCH 127/283] add module AMPlify (#1498) * add module AMPlify * Apply suggestions from code review Thanks for the review! Co-authored-by: Moritz E. Beber * removed trailing whitespaces * Apply suggestions from code review Thanks again! Co-authored-by: Moritz E. Beber * Apply suggestions from code review Thank you for the suggestions! Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: James A. 
Fellows Yates * Apply suggestions from code review Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> * including review suggestions * fix versions.yml * add model_dir input * add model_dir to meta.yml * complete faa pattern in meta.yml * add fa.gz to pattern Co-authored-by: Moritz E. Beber Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Co-authored-by: James A. Fellows Yates --- modules/amplify/predict/main.nf | 41 ++++++++++++++++ modules/amplify/predict/meta.yml | 47 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/amplify/predict/main.nf | 18 +++++++ tests/modules/amplify/predict/nextflow.config | 5 ++ tests/modules/amplify/predict/test.yml | 9 ++++ 6 files changed, 124 insertions(+) create mode 100644 modules/amplify/predict/main.nf create mode 100644 modules/amplify/predict/meta.yml create mode 100644 tests/modules/amplify/predict/main.nf create mode 100644 tests/modules/amplify/predict/nextflow.config create mode 100644 tests/modules/amplify/predict/test.yml diff --git a/modules/amplify/predict/main.nf b/modules/amplify/predict/main.nf new file mode 100644 index 00000000..d035516f --- /dev/null +++ b/modules/amplify/predict/main.nf @@ -0,0 +1,41 @@ +def VERSION = '1.0.3' // Version information not provided by tool + +process AMPLIFY_PREDICT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::amplify=1.0.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/amplify:1.0.3--py36hdfd78af_0': + 'quay.io/biocontainers/amplify:1.0.3--py36hdfd78af_0' }" + + input: + tuple val(meta), path(faa) + path(model_dir) + + output: + tuple val(meta), path('*.tsv'), emit: tsv + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def custom_model_dir = model_dir ? "-md ${model_dir}" : "" + """ + AMPlify \\ + $args \\ + ${custom_model_dir} \\ + -s '${faa}' + + #rename output, because tool includes date and time in name + mv *.tsv ${prefix}.tsv + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + AMPlify: $VERSION + END_VERSIONS + """ +} diff --git a/modules/amplify/predict/meta.yml b/modules/amplify/predict/meta.yml new file mode 100644 index 00000000..c9ffe8a4 --- /dev/null +++ b/modules/amplify/predict/meta.yml @@ -0,0 +1,47 @@ +name: "amplify_predict" +description: AMPlify is an attentive deep learning model for antimicrobial peptide prediction. +keywords: + - antimicrobial peptides + - AMPs + - prediction + - model +tools: + - "amplify": + description: "Attentive deep learning model for antimicrobial peptide prediction" + homepage: "https://github.com/bcgsc/AMPlify" + documentation: "https://github.com/bcgsc/AMPlify" + tool_dev_url: "https://github.com/bcgsc/AMPlify" + doi: "https://doi.org/10.1186/s12864-022-08310-4" + licence: "['GPL v3']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - faa: + type: file + description: amino acid sequences fasta + pattern: "*.{fa,fa.gz,faa,faa.gz,fasta,fasta.gz}" + - model_dir: + type: directory + description: Directory of where models are stored (optional) + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - tsv: + type: file + description: amino acid sequences with prediction (AMP, non-AMP) and probability scores + pattern: "*.{tsv}" + +authors: + - "@louperelo" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 607981a0..f8db3e87 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -26,6 +26,10 @@ allelecounter: - modules/allelecounter/** - tests/modules/allelecounter/** +amplify/predict: + - modules/amplify/predict/** + - tests/modules/amplify/predict/** + amps: - modules/amps/** - tests/modules/amps/** diff --git a/tests/modules/amplify/predict/main.nf b/tests/modules/amplify/predict/main.nf new file mode 100644 index 00000000..05db4cdb --- /dev/null +++ b/tests/modules/amplify/predict/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { PRODIGAL } from '../../../modules/prodigal/main.nf' addParams( options: [:] ) +include { AMPLIFY_PREDICT } from '../../../../modules/amplify/predict/main.nf' addParams( options: [:] ) + +workflow amplify_predict { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['contigs_fasta'], checkIfExists: true) + ] + model_dir = [] + + PRODIGAL ( input, "gff" ) + AMPLIFY_PREDICT ( PRODIGAL.out.amino_acid_fasta, model_dir) +} diff --git a/tests/modules/amplify/predict/nextflow.config b/tests/modules/amplify/predict/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/amplify/predict/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/amplify/predict/test.yml b/tests/modules/amplify/predict/test.yml new file mode 100644 index 00000000..3bc92889 --- /dev/null +++ b/tests/modules/amplify/predict/test.yml @@ -0,0 +1,9 @@ +- name: amplify predict amplify_predict + command: nextflow run tests/modules/amplify/predict -entry amplify_predict -c tests/config/nextflow.config + tags: + - amplify/predict + - amplify + files: + - path: output/amplify/test.tsv + md5sum: 1951084ce1d410028be86754997e5852 + - path: output/amplify/versions.yml From 37bf3936f3665483d070a5e0e0b314311032af7c Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 20 Apr 2022 16:26:56 +0200 Subject: [PATCH 128/283] add decompress possibilities to bgzip (#1540) * add decompress possibilities to bgzip * spacing --- modules/tabix/bgzip/main.nf | 11 +++++++---- modules/tabix/bgzip/meta.yml | 15 ++++++++------- tests/config/test_data.config | 2 +- tests/modules/tabix/bgzip/main.nf | 10 +++++++++- tests/modules/tabix/bgzip/test.yml | 12 ++++++++++-- 5 files changed, 35 insertions(+), 15 deletions(-) diff --git a/modules/tabix/bgzip/main.nf b/modules/tabix/bgzip/main.nf index 90940a5d..18e83c84 100644 --- a/modules/tabix/bgzip/main.nf +++ b/modules/tabix/bgzip/main.nf @@ -11,17 +11,20 @@ process TABIX_BGZIP { tuple val(meta), path(input) output: - tuple val(meta), path("*.gz"), emit: gz - path "versions.yml" , emit: versions + tuple val(meta), path("${prefix}*"), emit: output + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" + prefix = task.ext.prefix ?: "${meta.id}" + in_bgzip = input.toString().endsWith(".gz") + command1 = in_bgzip ? '-d' : '-c' + command2 = in_bgzip ? '' : " > ${prefix}.${input.getExtension()}.gz" """ - bgzip -c $args $input > ${prefix}.${input.getExtension()}.gz + bgzip $command1 $args -@${task.cpus} $input $command2 cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/tabix/bgzip/meta.yml b/modules/tabix/bgzip/meta.yml index 207427e4..50070175 100644 --- a/modules/tabix/bgzip/meta.yml +++ b/modules/tabix/bgzip/meta.yml @@ -1,13 +1,14 @@ name: tabix_bgzip -description: Compresses files +description: Compresses/decompresses files keywords: - compress + - decompress - bgzip - tabix tools: - bgzip: description: | - Bgzip compresses files in a similar manner to, and compatible with, gzip. + Bgzip compresses or decompresses files in a similar manner to, and compatible with, gzip. homepage: https://www.htslib.org/doc/tabix.html documentation: http://www.htslib.org/doc/bgzip.html doi: 10.1093/bioinformatics/btp352 @@ -18,19 +19,19 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - file: + - input: type: file - description: text file + description: file to compress or to decompress output: - meta: type: map description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - file: + - output: type: file - description: Output compressed file - pattern: "*.{gz}" + description: Output compressed/decompressed file + pattern: "*." 
- versions: type: file description: File containing software versions diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 97d22981..ea123732 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -397,7 +397,7 @@ params { hello = "${test_data_dir}/generic/txt/hello.txt" } 'cnn' { - reference = "${test_data_dir}/generic/cnn/reference.cnn" + reference = "${test_data_dir}/generic/cnn/reference.cnn" } 'cooler'{ test_pairix_pair_gz = "${test_data_dir}/genomics/homo_sapiens/cooler/cload/hg19/hg19.GM12878-MboI.pairs.subsample.blksrt.txt.gz" diff --git a/tests/modules/tabix/bgzip/main.nf b/tests/modules/tabix/bgzip/main.nf index 4d349890..4e326d64 100644 --- a/tests/modules/tabix/bgzip/main.nf +++ b/tests/modules/tabix/bgzip/main.nf @@ -4,10 +4,18 @@ nextflow.enable.dsl = 2 include { TABIX_BGZIP } from '../../../../modules/tabix/bgzip/main.nf' -workflow test_tabix_bgzip { +workflow test_tabix_bgzip_compress { input = [ [ id:'test' ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_vcf'], checkIfExists: true) ] ] TABIX_BGZIP ( input ) } + +workflow test_tabix_bgzip_decompress { + input = [ [ id:'test' ], // meta map + [ file(params.test_data['sarscov2']['genome']['test_bed_gz'], checkIfExists: true) ] + ] + + TABIX_BGZIP ( input ) +} diff --git a/tests/modules/tabix/bgzip/test.yml b/tests/modules/tabix/bgzip/test.yml index 19357655..72819c90 100644 --- a/tests/modules/tabix/bgzip/test.yml +++ b/tests/modules/tabix/bgzip/test.yml @@ -1,8 +1,16 @@ -- name: tabix bgzip - command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config +- name: tabix bgzip compress + command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip_compress -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config tags: - tabix - tabix/bgzip files: - path: ./output/tabix/test.vcf.gz md5sum: fc178eb342a91dc0d1d568601ad8f8e2 +- name: tabix bgzip decompress + command: nextflow run ./tests/modules/tabix/bgzip -entry test_tabix_bgzip_decompress -c ./tests/config/nextflow.config -c ./tests/modules/tabix/bgzip/nextflow.config + tags: + - tabix + - tabix/bgzip + files: + - path: ./output/tabix/test.bed + md5sum: fe4053cf4de3aebbdfc3be2efb125a74 From d07d270743f1433a0aff2c490db1b33360db8b79 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Thu, 21 Apr 2022 10:38:17 +0200 Subject: [PATCH 129/283] Antismashlite download databases (#1426) * Create module antismashlitedownloaddatabases * Corrected user-specification of database directory * Updated test.yml * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * Fix typo in test.yml * Feed database files via docker/singularity mount) * Add external db file mounts to the containers * Fixed docker command in main.nf * Apply prettier * Apply prettier and add PWD * Add more output to test.yml * Add more output paths to test.yml * Fixed test.yml * Apply suggestions from code review Add documentation of why we need to mount files to the containers. Co-authored-by: James A. Fellows Yates * Fix code linting errors (remove trailing whitespaces) * Fix code linting error (remove trailing whitespace) * Fix errors from Prettier linting Co-authored-by: James A. 
Fellows Yates --- .../antismashlitedownloaddatabases/main.nf | 46 ++++++++++++++++ .../antismashlitedownloaddatabases/meta.yml | 55 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ .../antismashlitedownloaddatabases/main.nf | 29 ++++++++++ .../nextflow.config | 5 ++ .../antismashlitedownloaddatabases/test.yml | 14 +++++ 6 files changed, 153 insertions(+) create mode 100644 modules/antismash/antismashlitedownloaddatabases/main.nf create mode 100644 modules/antismash/antismashlitedownloaddatabases/meta.yml create mode 100644 tests/modules/antismash/antismashlitedownloaddatabases/main.nf create mode 100644 tests/modules/antismash/antismashlitedownloaddatabases/nextflow.config create mode 100644 tests/modules/antismash/antismashlitedownloaddatabases/test.yml diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf new file mode 100644 index 00000000..1853d80a --- /dev/null +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -0,0 +1,46 @@ +process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { + label 'process_low' + + conda (params.enable_conda ? "bioconda::antismash-lite=6.0.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/antismash-lite:6.0.1--pyhdfd78af_1' : + 'quay.io/biocontainers/antismash-lite:6.0.1--pyhdfd78af_1' }" + + /* + These files are normally downloaded by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH. + Reason: Upon execution, the tool checks if certain database files are present within the container and if not, it tries to create them in /usr/local/bin, for which only root user has write permissions. Mounting those database files with this module prevents the tool from trying to create them. + */ + + containerOptions { + workflow.containerEngine == 'singularity' ? + "-B $database_css:/usr/local/lib/python3.8/site-packages/antismash/outputs/html/css,$database_detection:/usr/local/lib/python3.8/site-packages/antismash/detection,$database_modules:/usr/local/lib/python3.8/site-packages/antismash/modules" : + workflow.containerEngine == 'docker' ? 
+ "-v \$PWD/$database_css:/usr/local/lib/python3.8/site-packages/antismash/outputs/html/css -v \$PWD/$database_detection:/usr/local/lib/python3.8/site-packages/antismash/detection -v \$PWD/$database_modules:/usr/local/lib/python3.8/site-packages/antismash/modules" : + '' + } + + input: + path database_css + path database_detection + path database_modules + + output: + path("antismash_db") , emit: database + path "versions.yml", emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + """ + download-antismash-databases \\ + --database-dir antismash_db \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + antismash: \$(antismash --version | sed 's/antiSMASH //') + END_VERSIONS + """ +} diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml new file mode 100644 index 00000000..ad393bae --- /dev/null +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -0,0 +1,55 @@ +name: antismash_antismashlitedownloaddatabases +description: antiSMASH allows the rapid genome-wide identification, annotation and analysis of secondary metabolite biosynthesis gene clusters. This module downloads the antiSMASH databases. +keywords: + - secondary metabolites + - BGC + - biosynthetic gene cluster + - genome mining + - NRPS + - RiPP + - antibiotics + - prokaryotes + - bacteria + - eukaryotes + - fungi + - antismash + - database +tools: + - antismash: + description: antiSMASH - the antibiotics and Secondary Metabolite Analysis SHell + homepage: https://docs.antismash.secondarymetabolites.org + documentation: https://docs.antismash.secondarymetabolites.org + tool_dev_url: https://github.com/antismash/antismash + doi: "10.1093/nar/gkab335" + licence: ["AGPL v3"] + +input: + - database_css: + type: directory + description: | + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "css" + - database_detection: + type: directory + description: | + antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "detection" + - database_modules: + type: directory + description: | + antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. 
+ pattern: "modules" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + - database: + type: directory + description: Download directory for antiSMASH databases + pattern: "antismash_db" + +authors: + - "@jasmezz" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index f8db3e87..9a128bd4 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -42,6 +42,10 @@ amrfinderplus/update: - modules/amrfinderplus/update/** - tests/modules/amrfinderplus/update/** +antismash/antismashlitedownloaddatabases: + - modules/antismash/antismashlitedownloaddatabases/** + - tests/modules/antismash/antismashlitedownloaddatabases/** + arriba: - modules/arriba/** - tests/modules/arriba/** diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/main.nf b/tests/modules/antismash/antismashlitedownloaddatabases/main.nf new file mode 100644 index 00000000..d7289acc --- /dev/null +++ b/tests/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -0,0 +1,29 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR as UNTAR1 } from '../../../../modules/untar/main.nf' +include { UNTAR as UNTAR2 } from '../../../../modules/untar/main.nf' +include { UNTAR as UNTAR3 } from '../../../../modules/untar/main.nf' +include { ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES } from '../../../../modules/antismash/antismashlitedownloaddatabases/main.nf' + +workflow test_antismash_antismashlitedownloaddatabases { + input1 = [ + [], + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/antismash/css.tar.gz', checkIfExists: true) + ] + input2 = [ + [], + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/antismash/detection.tar.gz', checkIfExists: true) + ] + input3 = [ + [], + file('https://github.com/nf-core/test-datasets/raw/modules/data/delete_me/antismash/modules.tar.gz', checkIfExists: true) + ] + + UNTAR1 ( input1 ) + UNTAR2 ( input2 ) + UNTAR3 ( input3 ) + + ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES ( UNTAR1.out.untar.map{ it[1] }, UNTAR2.out.untar.map{ it[1] }, UNTAR3.out.untar.map{ it[1] } ) +} diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/nextflow.config b/tests/modules/antismash/antismashlitedownloaddatabases/nextflow.config new file mode 100644 index 00000000..06a716aa --- /dev/null +++ b/tests/modules/antismash/antismashlitedownloaddatabases/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml new file mode 100644 index 00000000..3493bb4b --- /dev/null +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -0,0 +1,14 @@ +- name: antismash antismashlitedownloaddatabases test_antismash_antismashlitedownloaddatabases + command: nextflow run tests/modules/antismash/antismashlitedownloaddatabases -entry test_antismash_antismashlitedownloaddatabases -c tests/config/nextflow.config + tags: + - antismash/antismashlitedownloaddatabases + - antismash + files: + - path: output/antismash/versions.yml + md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 + - path: output/antismash/antismash_db + - path: output/antismash/antismash_db/clusterblast + - path: output/antismash/antismash_db/clustercompare + - path: output/antismash/antismash_db/pfam + - path: 
output/antismash/antismash_db/resfam
+    - path: output/antismash/antismash_db/tigrfam

From d85f9c7011a2539dc07582c1642f4255a3724b7c Mon Sep 17 00:00:00 2001
From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com>
Date: Thu, 21 Apr 2022 13:16:23 +0200
Subject: [PATCH 130/283] add subworkflow bam_qc_picard

---
 subworkflows/nf-core/bam_qc_picard/main.nf  | 43 +++++++++++++
 subworkflows/nf-core/bam_qc_picard/meta.yml | 64 +++++++++++++++++++
 .../nf-core/bam_qc_picard/main.nf           | 21 ++++++
 .../nf-core/bam_qc_picard/test.yml          | 33 ++++++++++
 4 files changed, 161 insertions(+)
 create mode 100644 subworkflows/nf-core/bam_qc_picard/main.nf
 create mode 100644 subworkflows/nf-core/bam_qc_picard/meta.yml
 create mode 100644 tests/subworkflows/nf-core/bam_qc_picard/main.nf
 create mode 100644 tests/subworkflows/nf-core/bam_qc_picard/test.yml

diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf
new file mode 100644
index 00000000..31edb2c3
--- /dev/null
+++ b/subworkflows/nf-core/bam_qc_picard/main.nf
@@ -0,0 +1,43 @@
+//
+// Run QC steps on BAM/CRAM files using Picard
+//
+
+params.options = [:]
+
+include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../modules/picard/collectmultiplemetrics/main' addParams( options: params.options )
+include { PICARD_COLLECTWGSMETRICS      } from '../../../modules/picard/collectwgsmetrics/main'      addParams( options: params.options )
+include { PICARD_COLLECTHSMETRICS       } from '../../../modules/picard/collecthsmetrics/main'       addParams( options: params.options )
+
+workflow BAM_QC_PICARD {
+    take:
+    ch_bam_bai          // channel: [ val(meta), [ bam ], [bai/csi] ]
+    ch_fasta            // channel: [ fasta ]
+    ch_bait_interval    // channel: [ bait_interval ]
+    ch_target_interval  // channel: [ target_interval ]
+
+    main:
+    ch_versions = Channel.empty()
+
+    PICARD_COLLECTMULTIPLEMETRICS( ch_bam_bai, ch_fasta )
+    ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first())
+    if (!ch_bait_interval.isEmpty() || !ch_target_interval.isEmpty()) {
+        if (ch_bait_interval.isEmpty()) {
+            throw new Error("Bait interval channel is empty")
+        }
+        if (ch_target_interval.isEmpty()) {
+            throw new Error("Target interval channel is empty")
+        }
+        PICARD_COLLECTHSMETRICS( ch_bam_bai, ch_fasta, ch_bait_interval, ch_target_interval )
+        ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first())
+    } else {
+        PICARD_COLLECTWGSMETRICS( ch_bam_bai, ch_fasta )
+        ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first())
+    }
+
+    emit:
+    hs_metrics       = PICARD_COLLECTHSMETRICS.out.hs_metrics       // channel: [ val(meta), [ hs_metrics ] ]
+    wgs_metrics      = PICARD_COLLECTWGSMETRICS.out.metrics         // channel: [ val(meta), [ wgs_metrics ] ]
+    multiple_metrics = PICARD_COLLECTMULTIPLEMETRICS.out.metrics    // channel: [ val(meta), [ multiple_metrics ] ]
+
+    versions = ch_versions                                          // channel: [ versions.yml ]
+}
diff --git a/subworkflows/nf-core/bam_qc_picard/meta.yml b/subworkflows/nf-core/bam_qc_picard/meta.yml
new file mode 100644
index 00000000..77104e82
--- /dev/null
+++ b/subworkflows/nf-core/bam_qc_picard/meta.yml
@@ -0,0 +1,64 @@
+name: bam_qc
+description: Produces comprehensive statistics from BAM file
+keywords:
+  - statistics
+  - counts
+  - hs_metrics
+  - wgs_metrics
+  - bam
+  - sam
+  - cram
+modules:
+  - picard/collectmultiplemetrics
+  - picard/collectwgsmetrics
+  - picard/collecthsmetrics
+input:
+  - meta:
+      type: map
+      description: |
+        Groovy Map containing sample information
+        e.g. 
[ id:'test', single_end:false ] + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + - bai: + type: file + description: Index for BAM/CRAM/SAM file + pattern: "*.{bai,crai,sai}" + - fasta: + type: optional file + description: Reference file the CRAM was created with + pattern: "*.{fasta,fa}" + - bait_intervals: + type: optional file + description: An interval list file that contains the locations of the baits used. + pattern: "baits.interval_list" + - target_intervals: + type: optional file + description: An interval list file that contains the locations of the targets. + pattern: "targets.interval_list" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - hs_metrics: + type: file + description: Alignment metrics files generated by picard CollectHsMetrics + pattern: "*_collecthsmetrics.txt" + - wgs_metrics: + type: file + description: Alignment metrics files generated by picard CollectWgsMetrics + pattern: "*_{metrics}" + - multiple_metrics: + type: file + description: Alignment metrics files generated by picard CollectMultipleMetrics + pattern: "*_{metrics}" + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" +authors: + - "@matthdsm" diff --git a/tests/subworkflows/nf-core/bam_qc_picard/main.nf b/tests/subworkflows/nf-core/bam_qc_picard/main.nf new file mode 100644 index 00000000..d88f2bf9 --- /dev/null +++ b/tests/subworkflows/nf-core/bam_qc_picard/main.nf @@ -0,0 +1,21 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BAM_QC_PICARD } from '../../../../subworkflows/nf-core/bam_qc_picard/main' addParams([:]) + +workflow test_bam_qc_picard_wgs { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + ] + + BAM_QC_PICARD ( input, [], [], [] ) +} + +workflow test_bam_qc_picard_targetted { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + BAM_QC_PICARD ( input, [], file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) ) +} diff --git a/tests/subworkflows/nf-core/bam_qc_picard/test.yml b/tests/subworkflows/nf-core/bam_qc_picard/test.yml new file mode 100644 index 00000000..af98230d --- /dev/null +++ b/tests/subworkflows/nf-core/bam_qc_picard/test.yml @@ -0,0 +1,33 @@ +- name: bam qc picard wgs + command: nextflow run ./tests/subworkflows/nf-core/bam_qc_picard -entry test_bam_qc_picard_wgs -c tests/config/nextflow.config + tags: + - subworkflows + # - subworkflows/bam_qc_picard + # Modules + # - picard + # - picard/collectmultiplemetrics + # - picard/collectwgsmetrics + files: + - path: ./output/picard/test.CollectMultipleMetrics.alignment_summary_metrics + - path: ./output/picard/test.CollectMultipleMetrics.insert_size_metrics + - path: ./output/picard/test.CollectMultipleMetrics.base_distribution_by_cycle_metrics + - path: ./output/picard/test.CollectMultipleMetrics.quality_by_cycle_metrics + - path: ./output/picard/test.CollectMultipleMetrics.quality_distribution_metrics + - path: ./output/picard/test.CollectWgsMetrics.coverage_metrics + +- name: bam qc picard targetted + command: nextflow run ./tests/subworkflows/nf-core/bam_qc_picard -entry test_bam_qc_picard_targetted -c 
tests/config/nextflow.config + tags: + - subworkflows + # - subworkflows/bam_qc_picard + # Modules + # - picard + # - picard/collectmultiplemetrics + # - picard/collecthsmetrics + files: + - path: ./output/picard/test.CollectMultipleMetrics.alignment_summary_metrics + - path: ./output/picard/test.CollectMultipleMetrics.insert_size_metrics + - path: ./output/picard/test.CollectMultipleMetrics.base_distribution_by_cycle_metrics + - path: ./output/picard/test.CollectMultipleMetrics.quality_by_cycle_metrics + - path: ./output/picard/test.CollectMultipleMetrics.quality_distribution_metrics + - path: ./output/picard/test_collecthsmetrics.txt From abe025677cdd805cc93032341ab19885473c1a07 Mon Sep 17 00:00:00 2001 From: Francesco L <53608000+lescai@users.noreply.github.com> Date: Thu, 21 Apr 2022 14:33:59 +0200 Subject: [PATCH 131/283] update to kraken2: breaking change - output channels renamed (#1525) * updated kraken2 module to include optional classification of each input reads, and make fastq outputs optional NB: this is a breaking change, because the output channels have been renamed as a consequence of changes * updated yml * pigz command made optional, in order to be executed only if fastq of classified/unclassified reads are saved * updated test yaml file for kraken2 * fixed TODOs and renamed variables and outputs * untar in conda cannot keep same md5sum of version, and therefore md5sum check removed * improved description of the options Co-authored-by: James A. Fellows Yates --- modules/kraken2/kraken2/main.nf | 23 ++++++++++----- modules/kraken2/kraken2/meta.yml | 25 ++++++++++++---- tests/modules/kraken2/kraken2/main.nf | 14 +++++++-- tests/modules/kraken2/kraken2/test.yml | 40 +++++++++++++++++--------- 4 files changed, 75 insertions(+), 27 deletions(-) diff --git a/modules/kraken2/kraken2/main.nf b/modules/kraken2/kraken2/main.nf index 3ec5df52..d4000233 100644 --- a/modules/kraken2/kraken2/main.nf +++ b/modules/kraken2/kraken2/main.nf @@ -10,12 +10,15 @@ process KRAKEN2_KRAKEN2 { input: tuple val(meta), path(reads) path db + val save_output_fastqs + val save_reads_assignment output: - tuple val(meta), path('*classified*') , emit: classified - tuple val(meta), path('*unclassified*'), emit: unclassified - tuple val(meta), path('*report.txt') , emit: txt - path "versions.yml" , emit: versions + tuple val(meta), path('*classified*') , optional:true, emit: classified_reads_fastq + tuple val(meta), path('*unclassified*') , optional:true, emit: unclassified_reads_fastq + tuple val(meta), path('*classifiedreads*'), optional:true, emit: classified_reads_assignment + tuple val(meta), path('*report.txt') , emit: report + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -26,19 +29,25 @@ process KRAKEN2_KRAKEN2 { def paired = meta.single_end ? "" : "--paired" def classified = meta.single_end ? "${prefix}.classified.fastq" : "${prefix}.classified#.fastq" def unclassified = meta.single_end ? "${prefix}.unclassified.fastq" : "${prefix}.unclassified#.fastq" + def classified_command = save_output_fastqs ? "--classified-out ${classified}" : "" + def unclassified_command = save_output_fastqs ? "--unclassified-out ${unclassified}" : "" + def readclassification_command = save_reads_assignment ? "--output ${prefix}.kraken2.classifiedreads.txt" : "" + def compress_reads_command = save_output_fastqs ? 
"pigz -p $task.cpus *.fastq" : "" + """ kraken2 \\ --db $db \\ --threads $task.cpus \\ - --unclassified-out $unclassified \\ - --classified-out $classified \\ --report ${prefix}.kraken2.report.txt \\ --gzip-compressed \\ + $unclassified_command \\ + $classified_command \\ + $readclassification_command \\ $paired \\ $args \\ $reads - pigz -p $task.cpus *.fastq + $compress_reads_command cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/kraken2/kraken2/meta.yml b/modules/kraken2/kraken2/meta.yml index 9d6a3855..7129fe3a 100644 --- a/modules/kraken2/kraken2/meta.yml +++ b/modules/kraken2/kraken2/meta.yml @@ -27,25 +27,40 @@ input: - db: type: directory description: Kraken2 database + - save_output_fastqs: + type: boolean + description: | + If true, optional commands are added to save classified and unclassified reads + as fastq files + - save_reads_assignment: + type: boolean + description: | + If true, an optional command is added to save a file reporting the taxonomic + classification of each input read output: - meta: type: map description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - - classified: + - classified_reads_fastq: type: file description: | - Reads classified to belong to any of the taxa + Reads classified as belonging to any of the taxa on the Kraken2 database. pattern: "*{fastq.gz}" - - unclassified: + - unclassified_reads_fastq: type: file description: | - Reads not classified to belong to any of the taxa + Reads not classified to any of the taxa on the Kraken2 database. pattern: "*{fastq.gz}" - - txt: + - classified_reads_assignment: + type: file + description: | + Kraken2 output file indicating the taxonomic assignment of + each input read + - report: type: file description: | Kraken2 report containing stats about classified diff --git a/tests/modules/kraken2/kraken2/main.nf b/tests/modules/kraken2/kraken2/main.nf index 94f4db95..4a3593e4 100644 --- a/tests/modules/kraken2/kraken2/main.nf +++ b/tests/modules/kraken2/kraken2/main.nf @@ -12,7 +12,7 @@ workflow test_kraken2_kraken2_single_end { db = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ] UNTAR ( db ) - KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] } ) + KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] }, true, false ) } workflow test_kraken2_kraken2_paired_end { @@ -23,5 +23,15 @@ workflow test_kraken2_kraken2_paired_end { db = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ] UNTAR ( db ) - KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] } ) + KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] }, true, false ) +} + +workflow test_kraken2_kraken2_classifyreads { + input = [ [ id:'test', single_end:true ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) ] + ] + db = [ [], file(params.test_data['sarscov2']['genome']['kraken2_tar_gz'], checkIfExists: true) ] + + UNTAR ( db ) + KRAKEN2_KRAKEN2 ( input, UNTAR.out.untar.map{ it[1] }, false, true ) } diff --git a/tests/modules/kraken2/kraken2/test.yml b/tests/modules/kraken2/kraken2/test.yml index 1ec413bf..af1e6e0d 100644 --- a/tests/modules/kraken2/kraken2/test.yml +++ b/tests/modules/kraken2/kraken2/test.yml @@ -1,29 +1,43 @@ -- name: kraken2 kraken2 single-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config +- name: kraken2 
kraken2 test_kraken2_kraken2_single_end + command: nextflow run tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_single_end -c tests/config/nextflow.config tags: - kraken2 - kraken2/kraken2 files: - path: output/kraken2/test.classified.fastq.gz - should_exist: true - - path: output/kraken2/test.unclassified.fastq.gz - should_exist: true - path: output/kraken2/test.kraken2.report.txt md5sum: 4227755fe40478b8d7dc8634b489761e + - path: output/kraken2/test.unclassified.fastq.gz + - path: output/kraken2/versions.yml + md5sum: 6e3ad947ac8dee841a89216071c181cc + - path: output/untar/versions.yml -- name: kraken2 kraken2 paired-end - command: nextflow run ./tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/kraken2/kraken2/nextflow.config +- name: kraken2 kraken2 test_kraken2_kraken2_paired_end + command: nextflow run tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_paired_end -c tests/config/nextflow.config tags: - kraken2 - kraken2/kraken2 files: - path: output/kraken2/test.classified_1.fastq.gz - should_exist: true - path: output/kraken2/test.classified_2.fastq.gz - should_exist: true - - path: output/kraken2/test.unclassified_1.fastq.gz - should_exist: true - - path: output/kraken2/test.unclassified_2.fastq.gz - should_exist: true - path: output/kraken2/test.kraken2.report.txt md5sum: 4227755fe40478b8d7dc8634b489761e + - path: output/kraken2/test.unclassified_1.fastq.gz + - path: output/kraken2/test.unclassified_2.fastq.gz + - path: output/kraken2/versions.yml + md5sum: 604482fe7a4519f890fae9c8beb1bd6e + - path: output/untar/versions.yml + +- name: kraken2 kraken2 test_kraken2_kraken2_classifyreads + command: nextflow run tests/modules/kraken2/kraken2 -entry test_kraken2_kraken2_classifyreads -c tests/config/nextflow.config + tags: + - kraken2 + - kraken2/kraken2 + files: + - path: output/kraken2/test.kraken2.classifiedreads.txt + md5sum: e7a90531f0d8d777316515c36fe4cae0 + - path: output/kraken2/test.kraken2.report.txt + md5sum: 4227755fe40478b8d7dc8634b489761e + - path: output/kraken2/versions.yml + md5sum: 3488c304259e83c5bea573403293fce9 + - path: output/untar/versions.yml From f4c69bc4270186ca5ec174833dbe26cc0e1860fc Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 21 Apr 2022 15:39:22 +0200 Subject: [PATCH 132/283] Module/bclconvert (#1485) * bclconvert: initial commit * add most of tool * attempt at adding testing stub * add dockerfile + instructions * add container to module * update readme * more attempts at making stubs work * finish stub run * fix ci issues * more fixes to stub * add read version check to stub * fix some tests * update readme * fix version number * syntax fix * revert edit to output directory * Update modules/bclconvert/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/bclconvert/meta.yml Co-authored-by: James A. Fellows Yates * update meta.yaml * update thread usage * Update modules/bclconvert/main.nf Co-authored-by: Edmund Miller * Escape env variable * Update modules/bclconvert/Dockerfile Co-authored-by: Mark Whelan <7407040+MrMarkW@users.noreply.github.com> * fix comments by @Emiller88 * fix task.cpus Co-authored-by: James A. 
Fellows Yates Co-authored-by: Edmund Miller Co-authored-by: Mark Whelan <7407040+MrMarkW@users.noreply.github.com> --- modules/bclconvert/.gitignore | 2 + modules/bclconvert/Dockerfile | 15 +++++ modules/bclconvert/LICENSE | 30 +++++++++ modules/bclconvert/README.md | 17 +++++ modules/bclconvert/main.nf | 81 ++++++++++++++++++++++++ modules/bclconvert/meta.yml | 45 +++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/bclconvert/main.nf | 22 +++++++ tests/modules/bclconvert/nextflow.config | 5 ++ tests/modules/bclconvert/test.yml | 52 +++++++++++++++ 10 files changed, 273 insertions(+) create mode 100644 modules/bclconvert/.gitignore create mode 100644 modules/bclconvert/Dockerfile create mode 100644 modules/bclconvert/LICENSE create mode 100644 modules/bclconvert/README.md create mode 100644 modules/bclconvert/main.nf create mode 100644 modules/bclconvert/meta.yml create mode 100644 tests/modules/bclconvert/main.nf create mode 100644 tests/modules/bclconvert/nextflow.config create mode 100644 tests/modules/bclconvert/test.yml diff --git a/modules/bclconvert/.gitignore b/modules/bclconvert/.gitignore new file mode 100644 index 00000000..45b0ea3a --- /dev/null +++ b/modules/bclconvert/.gitignore @@ -0,0 +1,2 @@ +bcl-convert +*.rpm diff --git a/modules/bclconvert/Dockerfile b/modules/bclconvert/Dockerfile new file mode 100644 index 00000000..df3e1d0f --- /dev/null +++ b/modules/bclconvert/Dockerfile @@ -0,0 +1,15 @@ +# Dockerfile to create container with bcl-convert +# Push to nfcore/bclconvert: + +FROM debian:bullseye-slim +LABEL authors="Matthias De Smet " \ + description="Docker image containing bcl-convert" +# Disclaimer: this container is not provided nor supported by Illumina +# 'ps' command is need by some nextflow executions to collect system stats +# Install procps and clean apt cache +RUN apt-get update \ + && apt-get install -y \ + procps \ + && apt-get clean -y && rm -rf /var/lib/apt/lists/* +COPY bcl-convert /usr/local/bin/bcl-convert +RUN chmod +x /usr/local/bin/bcl-convert diff --git a/modules/bclconvert/LICENSE b/modules/bclconvert/LICENSE new file mode 100644 index 00000000..6f523227 --- /dev/null +++ b/modules/bclconvert/LICENSE @@ -0,0 +1,30 @@ +ILLUMINA END-USER SOFTWARE LICENSE AGREEMENT + +IMPORTANT-READ CAREFULLY. THIS IS A LICENSE AGREEMENT THAT YOU ARE REQUIRED TO ACCEPT BEFORE, DOWNLOADING, INSTALLING AND USING ANY SOFTWARE MADE AVAILABLE FROM THE ILLUMINA SUPPORT CENTER (https://support.illumina.com). + +CAREFULLY READ ALL THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT BEFORE PROCEEDING WITH DOWNLOADING, INSTALLING, AND/OR USING THE SOFTWARE. YOU ARE NOT PERMITTED TO DOWNLOAD, INSTALL, AND/OR USE THE SOFTWARE UNTIL YOU HAVE AGREED TO BE BOUND BY ALL OF THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT. YOU REPRESENT AND WARRANT THAT YOU ARE DULY AUTHORIZED TO ACCEPT THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT ON BEHALF OF YOUR EMPLOYER. + +Software made available through the Illumina Support Center is licensed, not sold, to you. Your license to each software program made available through the Illumina Support Center is subject to your prior acceptance of either this Illumina End-User Software License Agreement (“Agreement”), or a custom end user license agreement (“Custom EULA”), if one is provided with the software. 
Any software that is subject to this Agreement is referred to herein as the “Software.” By accepting this Agreement, you agree the terms and conditions of this Agreement will apply to and govern any and all of your downloads, installations, and uses of each Illumina software program made available through the Illumina Support Center, except that your download, installation, and use of any software provided with a Custom EULA will be governed by the terms and conditions of the Custom EULA. + +This Agreement is made and entered into by and between Illumina, Inc., a Delaware corporation, having offices at 5200 Illumina Way, San Diego, CA 92122 (“Illumina”) and you as the end-user of the Software (hereinafter, “Licensee” or “you”). All software, firmware, and associated media, printed materials, and online and electronic documentation, including any updates or upgrades thereof, made available through the Illumina Support Center (collectively, “Software”) provided to Licensee are for use solely by Licensee and the provisions herein WILL apply with respect to such Software. + +License Grant. Subject to the terms and conditions of this Agreement, Illumina grants to Licensee, under the following terms and conditions, a personal, non-exclusive, revocable, non-transferable, non-sublicensable license, for its internal end-use purposes only, in the ordinary course of Licensee’s business to use the Software in executable object code form only, solely at the Licensee’s facility to, install and use the Software on a single computer accessible only by Licensee (and not on any public network or server), where the single computer is owned, leased, or otherwise substantially controlled by Licensee, for the purpose of processing and analyzing data generated from an Illumina genetic sequencing instrument owned and operated solely by Licensee (the “Product”). In the case of Software provided by Illumina in non-compiled form, Illumina grants Licensee a personal, non-exclusive, non-sublicenseable, restricted right to compile, install, and use one copy of the Software solely for processing and analyzing data generated from the Product. +License Restrictions. Except as expressly permitted in Section 1, Licensee may not make, have made, import, use, copy, reproduce, distribute, display, publish, sell, re-sell, lease, or sub-license the Software, in whole or in part, except as expressly provided for in this Agreement. Licensee may not modify, improve, translate, reverse engineer, decompile, disassemble, or create derivative works of the Software or otherwise attempt to (a) defeat, avoid, by-pass, remove, deactivate, or otherwise circumvent any software protection mechanisms in the Software including, without limitation, any such mechanism used to restrict or control the functionality of the Software, or (b) derive the source code or the underlying ideas, algorithms, structure, or organization form of the Software. Licensee will not allow, at any time, including during and after the term of the license, the Software or any portions or copies thereof in any form to become available to any third parties. Licensee may use the Software solely with genomic data that is generated using the Product; Licensee may not use the Software with any data generated from other products or instruments. Licensee may not use the Software to perform any data analysis services for any third party. +Ownership. The Software is protected by United States and international intellectual property laws. 
All right, title, and interest in and to the Software (including associated intellectual property rights) are and will remain vested in Illumina or Illumina’s affiliated companies or licensors. Licensee acknowledges that no rights, license or interest to any Illumina trademarks are granted hereunder. Licensee acknowledges that unauthorized reproduction or distribution of the Software, or any portion of it, may result in severe civil and criminal penalties. Illumina reserves all rights in and to the Software not expressly granted to Licensee under this Agreement. +Upgrades/Updates. Illumina may, at its sole discretion, provide updates or upgrades to the Software. In that case, Licensee WILL have the same rights and obligations under such updates or upgrades as it has for the versions of the Software initially provided to Licensee hereunder. Licensee recognizes that Illumina is not obligated to provide any upgrades or updates to, or support for, the Software. +Data Integrity/Loss. Licensee is responsible for the integrity and availability, including preventing the loss of data that Licensee generates, uses, analyzes, manages, or stores in connection with or through its use of the Software, including without limitation, investigating and implementing industry appropriate policies and procedures regarding the provision of access to Licensee’s data, monitoring access and use of Licensee’s data, conducting routine backups and archiving of Licensee’s data, and ensuring the adequacy of anti-virus software. Accordingly, Licensee agrees that Illumina is not responsible for any inability to access, loss or corruption of data as a result of Licensee’s use of the Software, and Illumina has no liability to Licensee in connection with such inability to access, loss or corruption of data. +Term of License. This Agreement will be in effect from the time Licensee expressly accepts the terms and conditions of this license, or otherwise installs the Software, thereby accepting the terms and conditions contained herein, and will remain in effect until terminated. This license will otherwise terminate upon the conditions set forth in this Agreement, if revoked by Illumina, or if Licensee fails to comply with any term or condition of this Agreement including failure to pay any applicable license fee. Licensee agrees upon termination of this Agreement for any reason to immediately discontinue use of and un-install the Software and destroy all copies of the Software in its possession and/or under its control, and return or destroy, at Illumina’s option, any compact disks, floppy disks or other media provided by Illumina storing the Software thereon (together with any authorized copies thereof), as well as any documentation associated therewith +Limited Warranty. Illumina warrants that, for a period of 6 months from the date of download or installation of the Software by Licensee, the Software will perform in all material respects in accordance with the accompanying documentation available on the Illumina Support Center. 
EXCEPT AND TO THE EXTENT EXPRESSLY PROVIDED IN THE FOREGOING, AND TO THE FULLEST EXTENT PERMITTED BY APPLICABLE LAW, THE SOFTWARE IS PROVIDED “AS IS” AND ILLUMINA EXPRESSLY DISCLAIMS ALL WARRANTIES AND CONDITIONS REGARDING THE SOFTWARE AND RESULTS GENERATED BY THE SOFTWARE, INCLUDING WITHOUT LIMITATION, TO THE FULLEST EXTENT PERMITTED BY APPLICABLE LAW, ALL OTHER EXPRESS OR IMPLIED WARRANTIES OR CONDITIONS OF MERCHANTABLE QUALITY, NON-INFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE, AND THOSE ARISING BY STATUTE OR OTHERWISE IN LAW OR FROM A COURSE OF DEALING OR USAGE OF TRADE. ILLUMINA DOES NOT WARRANT THAT THE FUNCTIONS CONTAINED IN THE SOFTWARE WILL MEET LICENSEE"S REQUIREMENTS, OR THAT THE OPERATION OF THE SOFTWARE WILL BE ERROR FREE OR UNINTERRUPTED. +Limitation of Liability. +(a) ILLUMINA’S ENTIRE LIABILITY AND LICENSEE"S EXCLUSIVE REMEDY UNDER THE LIMITED WARRANTY PROVISION OF SECTION 7 ABOVE WILL BE, AT ILLUMINA’S OPTION, EITHER (i) RETURN OF THE PRICE PAID FOR THE SOFTWARE, OR (ii) REPAIR OR REPLACEMENT OF THE PORTIONS OF THE SOFTWARE THAT DO NOT COMPLY WITH ILLUMINA’S LIMITED WARRANTY. THIS LIMITED WARRANTY IS VOID AND ILLUMINA WILL HAVE NO LIABILITY AT ALL IF FAILURE OF THE SOFTWARE TO COMPLY WITH ILLUMINA LIMITED WARRANTY HAS RESULTED FROM: (w) FAILURE TO USE THE SOFTWARE IN ACCORDANCE WITH ILLUMINA’S THEN CURRENT USER MANUAL OR THIS AGREEMENT; (x) ACCIDENT, ABUSE, OR MISAPPLICATION; (y) PRODUCTS OR EQUIPMENT NOT SPECIFIED BY ILLUMINA AS BEING COMPATIBLE WITH THE SOFTWARE; OR (z) IF LICENSEE HAS NOT NOTIFIED ILLUMINA IN WRITING OF THE DEFECT WITHIN THE ABOVE WARRANTY PERIOD. + +(b) TO THE FULLEST EXTENT PERMITTED BY APPLICABLE LAW, IN NO EVENT WILL ILLUMINA BE LIABLE UNDER ANY THEORY OF CONTRACT, TORT, STRICT LIABILITY OR OTHER LEGAL OR EQUITABLE THEORY FOR ANY PERSONAL INJURY OR ANY INDIRECT, CONSEQUENTIAL, OR INCIDENTAL DAMAGES, EVEN IF ILLUMINA HAS BEEN ADVISED OF THE POSSIBILITY THEREOF INCLUDING, WITHOUT LIMITATION, LOST PROFITS, LOST DATA, INTERRUPTION OF BUSINESS, LOST BUSINESS REVENUE, OTHER ECONOMIC LOSS, OR ANY LOSS OF RECORDED DATA ARISING OUT OF THE USE OF OR INABILITY TO USE THE SOFTWARE. EXCEPT AND TO THE EXTENT EXPRESSLY PROVIDED IN SECTION 7 AND 8(a) ABOVE OR AS OTHERWISE PERMITTED BY LAW, IN NO EVENT WILL ILLUMINA’S TOTAL LIABILITY TO LICENSEE FOR ALL DAMAGES (OTHER THAN AS MAY BE REQUIRED BY APPLICABLE LAW IN CASES INVOLVING PERSONAL INJURY) EXCEED THE AMOUNT OF $500 USD. THE FOREGOING LIMITATIONS WILL APPLY EVEN IF THE ABOVE STATED REMEDY FAILS OF ITS ESSENTIAL PURPOSE. + +Survival. The limitations of liability and ownership rights of Illumina contained herein and Licensee’s obligations following termination of this Agreement WILL survive the termination of this Agreement for any reason. +Research Use Only. The Software is labeled with a For Research Use Only or similar labeling statement and the performance characteristics of the Software have not been established and the Software is not for use in diagnostic procedures. Licensee acknowledges and agrees that (i) the Software has not been approved, cleared, or licensed by the United States Food and Drug Administration or any other regulatory entity whether foreign or domestic for any specific intended use, whether research, commercial, diagnostic, or otherwise, and (ii) Licensee must ensure it has any regulatory approvals that are necessary for Licensee’s intended uses of the Software. Licensee will comply with all applicable laws and regulations when using and maintaining the Software. +General. 
Licensee may not sublicense, assign, share, pledge, rent or transfer any of its rights under this Agreement in relation to the Software or any portion thereof including documentation. Illumina reserves the right to change this Agreement at any time. When Illumina makes any changes, Illumina will provide the updated Agreement, or a link to it, on Illumina’s website (www.illumina.com) and such updated Agreement WILL become effective immediately. Licensee’s continued access to or use of the Software represents Licensee’s agreement to any revised Agreement. If one or more provisions of this Agreement are found to be invalid or unenforceable, this Agreement WILL not be rendered inoperative but the remaining provisions WILL continue in full force and effect. This Agreement constitutes the entire agreement between the parties with respect to the subject matter of this Agreement and merges all prior communications except that a “hard-copy” form of licensing agreement relating to the Software previously agreed to in writing by Illumina and Licensee WILL supersede and govern in the event of any conflicting provisions. +Governing Law. This Agreement WILL be governed by and construed in accordance with the laws of the state of California, USA, without regard to its conflicts of laws principles, and independent of where a suit or action hereunder may be filed. +U.S. Government End Users. If Licensee is a branch agency or instrumentality of the United States Government, the following provision applies. The Software is a “commercial item” as that term is defined at 48 C.F.R. 2.101, consisting of “commercial computer software” and “commercial computer software documentation,” as such terms are used in 48 C.F.R. 12.212 or 48 C.F.R. 227.7202 (as applicable). Consistent with 48 C.F.R. 12.212 and 48 C.F.R. 227.7202-1 through 227.7202-4, all United States Government end users acquire the Software with only those rights set forth herein. +Contact. Any questions regarding legal rights, duties, obligations, or restrictions associated with the software hereunder should be directed to Illumina, Inc., 5200 Illumina Way, San Diego, CA 92122, Attention: Legal Department, Phone: (858) 202-4500, Fax: (858) 202-4599, web site: www.illumina.com . +Third Party Components. The Software may include third party software (“Third Party Programs”). Some of the Third Party Programs are available under open source or free software licenses. The License Agreement accompanying the Licensed Software does not alter any rights or obligations Licensee may have under those open source or free software licenses. The licenses that govern the terms and conditions of use of the Third Party Programs included in the Licensed Software are provided in the READ ME provided with the Software. The READ ME also contains copyright statements for the various open source software components (or portions thereof) that are distributed with the Licensed Software. +END OF END-USER SOFTWARE LICENSE AGREEMENT. diff --git a/modules/bclconvert/README.md b/modules/bclconvert/README.md new file mode 100644 index 00000000..4f8538d0 --- /dev/null +++ b/modules/bclconvert/README.md @@ -0,0 +1,17 @@ +# Updating the docker container and making a new module release + +bcl-convert is a commercial tool from Illumina. The container provided for the bcl-convert nf-core module is not provided nor supported by Illumina. Updating the bcl-convert versions in the container and pushing the update to Dockerhub needs to be done manually. + +1. 
Navigate to the appropriate download page. - [BCL Convert](https://support.illumina.com/sequencing/sequencing_software/bcl-convert/downloads.html): download the rpm of the desired bcl-convert version with `curl` or `wget`. +2. Unpack the RPM package using `rpm2cpio bcl-convert-*.rpm | cpio -i --make-directories`. Place the executable located in `/usr/bin/bcl-convert` in the same folder where the Dockerfile lies. +3. Create and test the container: + + ```bash + docker build . -t nfcore/bclconvert: + ``` + +4. Access rights are needed to push the container to the Dockerhub nfcore organization, please ask a core team member to do so. + + ```bash + docker push nfcore/bclconvert: + ``` diff --git a/modules/bclconvert/main.nf b/modules/bclconvert/main.nf new file mode 100644 index 00000000..e6925b50 --- /dev/null +++ b/modules/bclconvert/main.nf @@ -0,0 +1,81 @@ +process BCLCONVERT { + tag '$samplesheet' + label 'process_high' + + if (params.enable_conda) { + exit 1, "Conda environments cannot be used when using bcl-convert. Please use docker or singularity containers." + } + container "nfcore/bclconvert:3.9.3" + + input: + path samplesheet + path run_dir + + output: + path "*.fastq.gz" ,emit: fastq + path "Reports/*.{csv,xml,bin}" ,emit: reports + path "Logs/*.{log,txt}" ,emit: logs + path "InterOp/*.bin" ,emit: interop + path "versions.yml" ,emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + + """ + bcl-convert \ + $args \\ + --output-directory . \\ + --bcl-input-directory ${run_dir} \\ + --sample-sheet ${samplesheet} \\ + --bcl-num-parallel-tiles ${task.cpus} + + mkdir InterOp + cp ${run_dir}/InterOp/*.bin InterOp/ + mv Reports/*.bin InterOp/ + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bclconvert: \$(bcl-convert -V 2>&1 | head -n 1 | sed 's/^.*Version //') + END_VERSIONS + """ + + stub: + """ + echo "sample1_S1_L001_R1_001" > sample1_S1_L001_R1_001.fastq.gz + echo "sample1_S1_L001_R2_001" > sample1_S1_L001_R2_001.fastq.gz + echo "sample1_S1_L002_R1_001" > sample1_S1_L002_R1_001.fastq.gz + echo "sample1_S1_L002_R2_001" > sample1_S1_L002_R2_001.fastq.gz + echo "sample2_S2_L001_R1_001" > sample2_S2_L001_R1_001.fastq.gz + echo "sample2_S2_L001_R2_001" > sample2_S2_L001_R2_001.fastq.gz + echo "sample2_S2_L002_R1_001" > sample2_S2_L002_R1_001.fastq.gz + echo "sample2_S2_L002_R2_001" > sample2_S2_L002_R2_001.fastq.gz + + mkdir Reports + echo "Adapter_Metrics" > Reports/Adapter_Metrics.csv + echo "Demultiplex_Stats" > Reports/Demultiplex_Stats.csv + echo "fastq_list" > Reports/fastq_list.csv + echo "Index_Hopping_Counts" > Reports/Index_Hopping_Counts.csv + echo "IndexMetricsOut" > Reports/IndexMetricsOut.bin + echo "Quality_Metrics" > Reports/Quality_Metrics.csv + echo "RunInfo" > Reports/RunInfo.xml + echo "SampleSheet" > Reports/SampleSheet.csv + echo "Top_Unknown_Barcodes" > Reports/Top_Unknown_Barcodes.csv + + mkdir Logs + echo "Errors" > Logs/Errors.log + echo "FastqComplete" > Logs/FastqComplete.txt + echo "Info" > Logs/Info.log + echo "Warnings" > Logs/Warnings.log + + mkdir InterOp/ + echo "InterOp" > InterOp/InterOp.bin + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + bclconvert: \$(bcl-convert -V 2>&1 | head -n 1 | sed 's/^.*Version //') + END_VERSIONS + """ +} diff --git a/modules/bclconvert/meta.yml b/modules/bclconvert/meta.yml new file mode 100644 index 00000000..5c59a978 --- /dev/null +++ b/modules/bclconvert/meta.yml @@ -0,0 +1,45 @@ +name: "bclconvert" +description: Demultiplex Illumina BCL 
files +keywords: + - demultiplex + - illumina + - fastq +tools: + - "bclconvert": + description: "Demultiplex Illumina BCL files" + homepage: "https://support.illumina.com/sequencing/sequencing_software/bcl-convert.html" + documentation: "https://support-docs.illumina.com/SW/BCL_Convert/Content/SW/FrontPages/BCL_Convert.htm" + licence: "ILLUMINA" + +input: + - samplesheet: + type: file + description: "Input samplesheet" + pattern: "*.{csv}" + - run_dir: + type: directory + description: "Input run directory containing RunInfo.xml and BCL data" + +output: + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - fastq: + type: file + description: Demultiplexed FASTQ files + pattern: "*.{fastq.gz}" + - reports: + type: file + description: Demultiplexing Reports + pattern: "Reports/*.{csv,xml}" + - logs: + type: file + description: Log files + pattern: "Logs/*.{log,txt}" + - interop: + type: file + description: Interop files + pattern: "Interop/*.{bin}" +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 9a128bd4..786f87db 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -174,6 +174,10 @@ bcftools/view: - modules/bcftools/view/** - tests/modules/bcftools/view/** +bclconvert: + - modules/bclconvert/** + - tests/modules/bclconvert/** + bedtools/bamtobed: - modules/bedtools/bamtobed/** - tests/modules/bedtools/bamtobed/** diff --git a/tests/modules/bclconvert/main.nf b/tests/modules/bclconvert/main.nf new file mode 100644 index 00000000..e8a78e4f --- /dev/null +++ b/tests/modules/bclconvert/main.nf @@ -0,0 +1,22 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { BCLCONVERT } from '../../../modules/bclconvert/main.nf' + +process STUB_BCLCONVERT_INPUT { + output: + path "SampleSheet.csv" ,emit: samplesheet + path "DDMMYY_SERIAL_FLOWCELL" ,emit: run_dir + + stub: + """ + mkdir DDMMYY_SERIAL_FLOWCELL + echo "SampleSheet" > SampleSheet.csv + """ +} + +workflow test_bclconvert { + STUB_BCLCONVERT_INPUT () + BCLCONVERT (STUB_BCLCONVERT_INPUT.out.samplesheet, STUB_BCLCONVERT_INPUT.out.run_dir) +} diff --git a/tests/modules/bclconvert/nextflow.config b/tests/modules/bclconvert/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/bclconvert/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/bclconvert/test.yml b/tests/modules/bclconvert/test.yml new file mode 100644 index 00000000..65e71a59 --- /dev/null +++ b/tests/modules/bclconvert/test.yml @@ -0,0 +1,52 @@ +- name: bclconvert test_bclconvert + command: nextflow run tests/modules/bclconvert -entry test_bclconvert -c tests/config/nextflow.config -stub-run + tags: + - bclconvert + files: + - path: output/bclconvert/InterOp/InterOp.bin + md5sum: d3dea0bb4ab1c8754af324f40b001481 + - path: output/bclconvert/Logs/Errors.log + md5sum: 334645f09074b2597a692e395b716a9c + - path: output/bclconvert/Logs/FastqComplete.txt + md5sum: a4c4c6ce2d0de67d3b7ac7d1fcb512e4 + - path: output/bclconvert/Logs/Info.log + md5sum: d238822d379f2277cac950ca986cb660 + - path: output/bclconvert/Logs/Warnings.log + md5sum: aeefd2d631817e170f88f25ecaaf4664 + - path: output/bclconvert/Reports/Adapter_Metrics.csv + md5sum: af62e9c7b44940cfd8ea11064a1f42ae + - path: output/bclconvert/Reports/Demultiplex_Stats.csv + md5sum: 
d11313931fcaabb5ce159462ad3dd1da + - path: output/bclconvert/Reports/IndexMetricsOut.bin + md5sum: 6bcee11c8145e3b1059ceaa91d2f5be7 + - path: output/bclconvert/Reports/Index_Hopping_Counts.csv + md5sum: 697e40e0c0d48b4bd25f138ef60b0bde + - path: output/bclconvert/Reports/Quality_Metrics.csv + md5sum: 3902fd38f6b01f1ce0f0e8724238f8f2 + - path: output/bclconvert/Reports/RunInfo.xml + md5sum: 5bef7c7e76360231b0c4afdfc915fd44 + - path: output/bclconvert/Reports/SampleSheet.csv + md5sum: c579e7d2c9c917c4cfb875a0373c0936 + - path: output/bclconvert/Reports/Top_Unknown_Barcodes.csv + md5sum: 39a5e7f6d21c12d6051afdc8261b6330 + - path: output/bclconvert/Reports/fastq_list.csv + md5sum: 32c51ab10e013fd547928de57361ffcb + - path: output/bclconvert/sample1_S1_L001_R1_001.fastq.gz + md5sum: 9b831a39755935333f86f167527a094d + - path: output/bclconvert/sample1_S1_L001_R2_001.fastq.gz + md5sum: 082f4f767b7619f409ca7e752ef482bf + - path: output/bclconvert/sample1_S1_L002_R1_001.fastq.gz + md5sum: 837764c89db93dfb53cd663c4f26f3d7 + - path: output/bclconvert/sample1_S1_L002_R2_001.fastq.gz + md5sum: 1a42cf6ba0bb8fc7770f278e6d1ab676 + - path: output/bclconvert/sample2_S2_L001_R1_001.fastq.gz + md5sum: 475bc426b7cc48d0551d40e31457dc78 + - path: output/bclconvert/sample2_S2_L001_R2_001.fastq.gz + md5sum: f670ccd7d9352e0e67fe1c1232429d94 + - path: output/bclconvert/sample2_S2_L002_R1_001.fastq.gz + md5sum: ebd5ff6fa5603e7d704b5a10598de58c + - path: output/bclconvert/sample2_S2_L002_R2_001.fastq.gz + md5sum: 2f83b460f52620d2548c7ef8845b31d7 + - path: output/stub/SampleSheet.csv + md5sum: c579e7d2c9c917c4cfb875a0373c0936 + - path: output/bclconvert/versions.yml From 12afb6b0faf3cabf769c9a2a7dd477e3f066eac0 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Fri, 22 Apr 2022 10:01:47 +0200 Subject: [PATCH 133/283] Update samtools view to add input path(index) (#1539) * feat added index as input, to allow module to be used for subsampling * fix test * feat added index to meta.yml * Update modules/samtools/view/meta.yml feat corrected description of idea pattern file in meta.yml Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates --- modules/samtools/view/main.nf | 2 +- modules/samtools/view/meta.yml | 4 ++++ tests/modules/samtools/view/main.nf | 7 ++++--- 3 files changed, 9 insertions(+), 4 deletions(-) diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 5f14fbbf..11cfb74b 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -8,7 +8,7 @@ process SAMTOOLS_VIEW { 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: - tuple val(meta), path(input) + tuple val(meta), path(input), path(index) path fasta output: diff --git a/modules/samtools/view/meta.yml b/modules/samtools/view/meta.yml index 5604bfa7..a8b43ecc 100644 --- a/modules/samtools/view/meta.yml +++ b/modules/samtools/view/meta.yml @@ -25,6 +25,10 @@ input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" + - index: + type: optional file + description: BAM.BAI/CRAM.CRAI file + pattern: "*.{.bai,.crai}" - fasta: type: optional file description: Reference file the CRAM was created with diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index 8ee27ef8..9c239066 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -6,7 +6,8 @@ include { SAMTOOLS_VIEW } from '../../../../modules/samtools/view/main.nf' workflow test_samtools_view { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [] ] SAMTOOLS_VIEW ( input, [] ) @@ -14,8 +15,8 @@ workflow test_samtools_view { workflow test_samtools_view_cram { input = [ [ id: 'test' ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_recalibrated_sorted_cram_crai'], checkIfExists: true) ] fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) From c7329a3a5730872dea512921217993cfa7acd959 Mon Sep 17 00:00:00 2001 From: Ramprasad Neethiraj <20065894+ramprasadn@users.noreply.github.com> Date: Fri, 22 Apr 2022 10:10:43 +0200 Subject: [PATCH 134/283] add stubs for stranger (#1543) --- modules/stranger/main.nf | 11 +++++++++++ tests/modules/stranger/main.nf | 5 +++++ tests/modules/stranger/test.yml | 10 ++++++++++ 3 files changed, 26 insertions(+) diff --git a/modules/stranger/main.nf b/modules/stranger/main.nf index 55678bd3..ddfa0070 100644 --- a/modules/stranger/main.nf +++ b/modules/stranger/main.nf @@ -33,4 +33,15 @@ process STRANGER { stranger: \$( stranger --version ) END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + stranger: \$( stranger --version ) + END_VERSIONS + """ } diff --git a/tests/modules/stranger/main.nf b/tests/modules/stranger/main.nf index 5bd6766b..4a930c19 100644 --- a/tests/modules/stranger/main.nf +++ b/tests/modules/stranger/main.nf @@ -23,3 +23,8 @@ workflow test_stranger_without_optional_variant_catalog { EXPANSIONHUNTER ( input, fasta, variant_catalog ) STRANGER ( EXPANSIONHUNTER.out.vcf, [] ) } + 
+workflow test_stranger_without_optional_variant_catalog_stubs { + EXPANSIONHUNTER ( input, fasta, variant_catalog ) + STRANGER ( EXPANSIONHUNTER.out.vcf, [] ) +} diff --git a/tests/modules/stranger/test.yml b/tests/modules/stranger/test.yml index c7a6972e..bf922c86 100644 --- a/tests/modules/stranger/test.yml +++ b/tests/modules/stranger/test.yml @@ -25,3 +25,13 @@ md5sum: bbe15159195681d5c18596d3ad85c78f - path: output/stranger/versions.yml md5sum: 8558542a007e90ea5dcdceed3f12585d + +- name: stranger test_stranger_without_optional_variant_catalog_stubs + command: nextflow run tests/modules/stranger -entry test_stranger_without_optional_variant_catalog -c tests/config/nextflow.config -stub-run + tags: + - stranger + files: + - path: output/expansionhunter/test.vcf + - path: output/expansionhunter/versions.yml + - path: output/stranger/test.vcf.gz + - path: output/stranger/versions.yml From 35231d394940dca2291ac2321c8f9b2e3b039905 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 10:13:57 +0200 Subject: [PATCH 135/283] update picard/collecthsmetrics (#1542) * update picard/collecthsmetrics * syntax fixes, bugfixes * add tests Co-authored-by: Jose Espinosa-Carrasco --- modules/picard/collecthsmetrics/main.nf | 9 +++++---- modules/picard/collecthsmetrics/meta.yml | 7 ++++--- tests/modules/picard/collecthsmetrics/main.nf | 2 +- tests/modules/picard/collecthsmetrics/test.yml | 2 +- 4 files changed, 11 insertions(+), 9 deletions(-) diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 3acf8bb8..ef7a9b9f 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -15,8 +15,8 @@ process PICARD_COLLECTHSMETRICS { path target_intervals output: - tuple val(meta), path("*collecthsmetrics.txt"), emit: hs_metrics - path "versions.yml" , emit: versions + tuple val(meta), path("*_metrics") , emit: metrics + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -41,7 +41,8 @@ process PICARD_COLLECTHSMETRICS { -BAIT_INTERVALS $bait_intervals \\ -TARGET_INTERVALS $target_intervals \\ -INPUT $bam \\ - -OUTPUT ${prefix}_collecthsmetrics.txt + -OUTPUT ${prefix}.CollectHsMetrics.coverage_metrics + cat <<-END_VERSIONS > versions.yml "${task.process}": @@ -52,7 +53,7 @@ process PICARD_COLLECTHSMETRICS { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch ${prefix}_collecthsmetrics.txt + touch ${prefix}.CollectHsMetrics.coverage_metrics cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/collecthsmetrics/meta.yml b/modules/picard/collecthsmetrics/meta.yml index 4b94909f..dc9d647a 100644 --- a/modules/picard/collecthsmetrics/meta.yml +++ b/modules/picard/collecthsmetrics/meta.yml @@ -57,10 +57,11 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - hs_metrics: + - metrics: type: file - description: The metrics file. 
- pattern: "*_collecthsmetrics.txt" + description: Alignment metrics files generated by picard + pattern: "*_{metrics}" authors: - "@projectoriented" + - "@matthdsm" diff --git a/tests/modules/picard/collecthsmetrics/main.nf b/tests/modules/picard/collecthsmetrics/main.nf index 2e8727b5..a28eb174 100644 --- a/tests/modules/picard/collecthsmetrics/main.nf +++ b/tests/modules/picard/collecthsmetrics/main.nf @@ -7,7 +7,7 @@ include { PICARD_COLLECTHSMETRICS } from '../../../../modules/picard/collecthsme workflow test_picard_collecthsmetrics { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) diff --git a/tests/modules/picard/collecthsmetrics/test.yml b/tests/modules/picard/collecthsmetrics/test.yml index 9232d508..9aa14f15 100644 --- a/tests/modules/picard/collecthsmetrics/test.yml +++ b/tests/modules/picard/collecthsmetrics/test.yml @@ -5,4 +5,4 @@ - picard/collecthsmetrics files: # The file can't be md5'd consistently - - path: output/picard/test_collecthsmetrics.txt + - path: output/picard/test.CollectHsMetrics.coverage_metrics From 90b203d3e915cce7434ed010b8a56a89f4142bdd Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 10:20:05 +0200 Subject: [PATCH 136/283] Tool/elprep split (#1533) * tool: elprep split * fixes for testing * fix tests * fix test outputs * create test-yaml * fix suggestions by @jfy133 Co-authored-by: James A. Fellows Yates --- modules/elprep/split/main.nf | 44 ++++++++++++++++++++++ modules/elprep/split/meta.yml | 43 +++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/elprep/split/main.nf | 15 ++++++++ tests/modules/elprep/split/nextflow.config | 9 +++++ tests/modules/elprep/split/test.yml | 10 +++++ 6 files changed, 125 insertions(+) create mode 100644 modules/elprep/split/main.nf create mode 100644 modules/elprep/split/meta.yml create mode 100644 tests/modules/elprep/split/main.nf create mode 100644 tests/modules/elprep/split/nextflow.config create mode 100644 tests/modules/elprep/split/test.yml diff --git a/modules/elprep/split/main.nf b/modules/elprep/split/main.nf new file mode 100644 index 00000000..8af558d4 --- /dev/null +++ b/modules/elprep/split/main.nf @@ -0,0 +1,44 @@ +process ELPREP_SPLIT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::elprep=5.1.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/elprep:5.1.2--he881be0_0': + 'quay.io/biocontainers/elprep:5.1.2--he881be0_0' }" + + input: + tuple val(meta), path(bam) + + output: + tuple val(meta), path("**.{bam,sam}"), emit: bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + meta.single_end ? args += " --single-end": "" + + """ + # create directory and move all input so elprep can find and merge them before splitting + mkdir input + mv ${bam} input/ + + mkdir ${prefix} + + elprep split \\ + input \\ + . 
\\ + $args \\ + --nr-of-threads $task.cpus \\ + --output-prefix $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + elprep: \$(elprep 2>&1 | head -n2 | tail -n1 |sed 's/^.*version //;s/ compiled.*\$//') + END_VERSIONS + """ +} diff --git a/modules/elprep/split/meta.yml b/modules/elprep/split/meta.yml new file mode 100644 index 00000000..b99562fa --- /dev/null +++ b/modules/elprep/split/meta.yml @@ -0,0 +1,43 @@ +name: "elprep_split" +description: Split bam file into manageable chunks +keywords: + - bam + - split by chromosome +tools: + - "elprep": + description: "elPrep is a high-performance tool for preparing .sam/.bam files for variant calling in sequencing pipelines. It can be used as a drop-in replacement for SAMtools/Picard/GATK4." + homepage: "https://github.com/ExaScience/elprep" + documentation: "https://github.com/ExaScience/elprep" + tool_dev_url: "https://github.com/ExaScience/elprep" + doi: "10.1371" + licence: "['AGPL v3']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: List of BAM/SAM files + pattern: "*.{bam,sam}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + # + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: List of split BAM/SAM files + pattern: "*.{bam,sam}" + +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 786f87db..19b51f3d 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -599,6 +599,10 @@ ectyper: - modules/ectyper/** - tests/modules/ectyper/** +elprep/split: + - modules/elprep/split/** + - tests/modules/elprep/split/** + emmtyper: - modules/emmtyper/** - tests/modules/emmtyper/** diff --git a/tests/modules/elprep/split/main.nf b/tests/modules/elprep/split/main.nf new file mode 100644 index 00000000..d5a111de --- /dev/null +++ b/tests/modules/elprep/split/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ELPREP_SPLIT } from '../../../../modules/elprep/split/main.nf' + +workflow test_elprep_split { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + + ELPREP_SPLIT ( input ) +} diff --git a/tests/modules/elprep/split/nextflow.config b/tests/modules/elprep/split/nextflow.config new file mode 100644 index 00000000..a3ae0169 --- /dev/null +++ b/tests/modules/elprep/split/nextflow.config @@ -0,0 +1,9 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName : ELPREP_SPLIT { + ext.args = "--contig-group-size 1 --output-type bam" + } + +} diff --git a/tests/modules/elprep/split/test.yml b/tests/modules/elprep/split/test.yml new file mode 100644 index 00000000..7ba139b1 --- /dev/null +++ b/tests/modules/elprep/split/test.yml @@ -0,0 +1,10 @@ +- name: elprep split test_elprep_split + command: nextflow run tests/modules/elprep/split -entry test_elprep_split -c tests/config/nextflow.config + tags: + - elprep + - elprep/split + files: + - path: output/elprep/splits/test-group00001.bam + - path: output/elprep/splits/test-unmapped.bam + - path: output/elprep/test-spread.bam + - path: output/elprep/versions.yml From 
9e3daae8ef8cc1e830c9ef8af5336df7065d2823 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 11:08:03 +0200 Subject: [PATCH 137/283] New module: elprep filter (#1524) * first commit * syntax fix * fix input * output sam during test for md5sum * replace md5sum with contains * add new test data, add extra in/outputs * cli fixes * fix outputs * Update modules/elprep/filter/main.nf Co-authored-by: James A. Fellows Yates * Update modules/elprep/filter/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/elprep/filter/meta.yml Co-authored-by: James A. Fellows Yates * fix suggestions by @jfy133 * Bit more verbose explanation for bool vals * define variables * fix prettier Co-authored-by: James A. Fellows Yates --- modules/elprep/filter/main.nf | 89 ++++++++++++++++ modules/elprep/filter/meta.yml | 106 ++++++++++++++++++++ tests/config/pytest_modules.yml | 4 + tests/config/test_data.config | 3 + tests/modules/elprep/filter/main.nf | 18 ++++ tests/modules/elprep/filter/nextflow.config | 7 ++ tests/modules/elprep/filter/test.yml | 13 +++ 7 files changed, 240 insertions(+) create mode 100644 modules/elprep/filter/main.nf create mode 100644 modules/elprep/filter/meta.yml create mode 100644 tests/modules/elprep/filter/main.nf create mode 100644 tests/modules/elprep/filter/nextflow.config create mode 100644 tests/modules/elprep/filter/test.yml diff --git a/modules/elprep/filter/main.nf b/modules/elprep/filter/main.nf new file mode 100644 index 00000000..02c93186 --- /dev/null +++ b/modules/elprep/filter/main.nf @@ -0,0 +1,89 @@ +process ELPREP_FILTER { + tag "$meta.id" + label 'process_high' + + conda (params.enable_conda ? "bioconda::elprep=5.1.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/elprep:5.1.2--he881be0_0': + 'quay.io/biocontainers/elprep:5.1.2--he881be0_0' }" + + input: + tuple val(meta), path(bam) + val(run_haplotypecaller) + val(run_bqsr) + path(reference_sequences) + path(filter_regions_bed) + path(reference_elfasta) + path(known_sites_elsites) + path(target_regions_bed) + path(intermediate_bqsr_tables) + val(bqsr_tables_only) + val(get_activity_profile) + val(get_assembly_regions) + + + output: + tuple val(meta), path("**.{bam,sam}") ,emit: bam + tuple val(meta), path("*.metrics.txt") ,optional: true, emit: metrics + tuple val(meta), path("*.recall") ,optional: true, emit: recall + tuple val(meta), path("*.vcf.gz") ,optional: true, emit: gvcf + tuple val(meta), path("*.table") ,optional: true, emit: table + tuple val(meta), path("*.activity_profile.igv") ,optional: true, emit: activity_profile + tuple val(meta), path("*.assembly_regions.igv") ,optional: true, emit: assembly_regions + path "versions.yml" ,emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = args.contains("--output-type sam") ? "sam" : "bam" + + // filter args + def reference_sequences_cmd = reference_sequences ? " --replace-reference-sequences ${reference_sequences}" : "" + def filter_regions_cmd = filter_regions_bed ? " --filter-non-overlapping-reads ${filter_regions_bed}" : "" + + // markdup args + def markdup_cmd = args.contains("--mark-duplicates") ? " --mark-optical-duplicates ${prefix}.metrics.txt": "" + + // variant calling args + def haplotyper_cmd = run_haplotypecaller ? 
" --haplotypecaller ${prefix}.g.vcf.gz": "" + + def fasta_cmd = reference_elfasta ? " --reference ${reference_elfasta}": "" + def known_sites_cmd = known_sites_elsites ? " --known-sites ${known_sites_elsites}": "" + def target_regions_cmd = target_regions_bed ? " --target-regions ${target_regions_bed}": "" + + // bqsr args + def bqsr_cmd = run_bqsr ? " --bqsr ${prefix}.recall": "" + def bqsr_tables_only_cmd = bqsr_tables_only ? " --bqsr-tables-only ${prefix}.table": "" + + def intermediate_bqsr_cmd = intermediate_bqsr_tables ? " --bqsr-apply .": "" + + // misc + def activity_profile_cmd = get_activity_profile ? " --activity-profile ${prefix}.activity_profile.igv": "" + def assembly_regions_cmd = get_assembly_regions ? " --assembly-regions ${prefix}.assembly_regions.igv": "" + + """ + elprep filter ${bam} ${prefix}.${suffix} \\ + ${reference_sequences_cmd} \\ + ${filter_regions_cmd} \\ + ${markdup_cmd} \\ + ${haplotyper_cmd} \\ + ${fasta_cmd} \\ + ${known_sites_cmd} \\ + ${target_regions_cmd} \\ + ${bqsr_cmd} \\ + ${bqsr_tables_only_cmd} \\ + ${intermediate_bqsr_cmd} \\ + ${activity_profile_cmd} \\ + ${assembly_regions_cmd} \\ + --nr-of-threads ${task.cpus} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + elprep: \$(elprep 2>&1 | head -n2 | tail -n1 |sed 's/^.*version //;s/ compiled.*\$//') + END_VERSIONS + """ +} diff --git a/modules/elprep/filter/meta.yml b/modules/elprep/filter/meta.yml new file mode 100644 index 00000000..d7d41071 --- /dev/null +++ b/modules/elprep/filter/meta.yml @@ -0,0 +1,106 @@ +name: "elprep_filter" +description: "Filter, sort and markdup sam/bam files, with optional BQSR and variant calling." +keywords: + - sort + - bam + - sam + - filter + - variant calling +tools: + - "elprep": + description: "elPrep is a high-performance tool for preparing .sam/.bam files for variant calling in sequencing pipelines. It can be used as a drop-in replacement for SAMtools/Picard/GATK4." + homepage: "https://github.com/ExaScience/elprep" + documentation: "https://github.com/ExaScience/elprep" + tool_dev_url: "https://github.com/ExaScience/elprep" + doi: "10.1371/journal.pone.0244471" + licence: "['AGPL v3']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - bam: + type: file + description: Input SAM/BAM file + pattern: "*.{bam,sam}" + - run_haplotypecaller: + type: boolean + description: Run variant calling on the input files. Needed to generate gvcf output. + - run_bqsr: + type: boolean + description: Run BQSR on the input files. Needed to generate recall metrics. + - reference_sequences: + type: file + description: Optional SAM header to replace existing header. + pattern: "*.sam" + - filter_regions_bed: + type: file + description: Optional BED file containing regions to filter. + pattern: "*.bed" + - reference_elfasta: + type: file + description: Elfasta file, required for BQSR and variant calling. + pattern: "*.elfasta" + - known_sites: + type: file + description: Optional elsites file containing known SNPs for BQSR. + pattern: "*.elsites" + - target_regions_bed: + type: file + description: Optional BED file containing target regions for BQSR and variant calling. + pattern: "*.bed" + - intermediate_bqsr_tables: + type: file + description: Optional list of BQSR tables, used when parsing files created by `elprep split` + pattern: "*.table" + - bqsr_tables_only: + type: boolean + description: Write intermediate BQSR tables, used when parsing files created by `elprep split`. 
+ - get_activity_profile: + type: boolean + description: Get the activity profile calculated by the haplotypecaller to the given file in IGV format. + - get_assembly_regions: + type: boolean + description: Get the assembly regions calculated by haplotypecaller to the speficied file in IGV format. +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - bam: + type: file + description: Sorted, markdup, optionally BQSR BAM/SAM file + pattern: "*.{bam,sam}" + - metrics: + type: file + description: Optional duplicate metrics file generated by elprep + pattern: "*.{metrics.txt}" + - recall: + type: file + description: Optional recall metrics file generated by elprep + pattern: "*.{recall}" + - gvcf: + type: file + description: Optional GVCF output file + pattern: "*.{vcf.gz}" + - table: + type: file + description: Optional intermediate BQSR table output file + pattern: "*.{table}" + - activity_profile: + type: file + description: Optional activity profile output file + pattern: "*.{activity_profile.igv}" + - assembly_regions: + type: file + description: Optional activity regions output file + pattern: "*.{assembly_regions.igv}" +authors: + - "@matthdsm" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 19b51f3d..c3bf04aa 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -599,6 +599,10 @@ ectyper: - modules/ectyper/** - tests/modules/ectyper/** +elprep/filter: + - modules/elprep/filter/** + - tests/modules/elprep/filter/** + elprep/split: - modules/elprep/split/** - tests/modules/elprep/split/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index ea123732..559c0d6f 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -112,6 +112,7 @@ params { } 'homo_sapiens' { 'genome' { + genome_elfasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome.elfasta" genome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome.fasta" genome_fasta_fai = "${test_data_dir}/genomics/homo_sapiens/genome/genome.fasta.fai" genome_dict = "${test_data_dir}/genomics/homo_sapiens/genome/genome.dict" @@ -123,6 +124,7 @@ params { genome_header = "${test_data_dir}/genomics/homo_sapiens/genome/genome.header" genome_bed_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz" genome_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/genome.bed.gz.tbi" + genome_elsites = "${test_data_dir}/genomics/homo_sapiens/genome/genome.elsites" transcriptome_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/transcriptome.fasta" genome2_fasta = "${test_data_dir}/genomics/homo_sapiens/genome/genome2.fasta" genome_chain_gz = "${test_data_dir}/genomics/homo_sapiens/genome/genome.chain.gz" @@ -136,6 +138,7 @@ params { genome_21_multi_interval_bed_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/multi_intervals.bed.gz.tbi" genome_21_chromosomes_dir = "${test_data_dir}/genomics/homo_sapiens/genome/chr21/sequence/chromosomes.tar.gz" + dbsnp_146_hg38_elsites = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.elsites" dbsnp_146_hg38_vcf_gz = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz" dbsnp_146_hg38_vcf_gz_tbi = "${test_data_dir}/genomics/homo_sapiens/genome/vcf/dbsnp_146.hg38.vcf.gz.tbi" gnomad_r2_1_1_vcf_gz = 
"${test_data_dir}/genomics/homo_sapiens/genome/vcf/gnomAD.r2.1.1.vcf.gz" diff --git a/tests/modules/elprep/filter/main.nf b/tests/modules/elprep/filter/main.nf new file mode 100644 index 00000000..0a8d43ca --- /dev/null +++ b/tests/modules/elprep/filter/main.nf @@ -0,0 +1,18 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { ELPREP_FILTER } from '../../../../modules/elprep/filter/main.nf' + +workflow test_elprep_filter { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] + reference_elfasta = file(params.test_data['homo_sapiens']['genome']['genome_elfasta'], checkIfExists: true) + known_sites_elsites = file(params.test_data['homo_sapiens']['genome']['dbsnp_146_hg38_elsites'], checkIfExists: true) + target_regions_bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + ELPREP_FILTER ( input, true, true, [], [], reference_elfasta, known_sites_elsites, target_regions_bed, [], [], true, true) +} diff --git a/tests/modules/elprep/filter/nextflow.config b/tests/modules/elprep/filter/nextflow.config new file mode 100644 index 00000000..d53a3d2d --- /dev/null +++ b/tests/modules/elprep/filter/nextflow.config @@ -0,0 +1,7 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + withName: ELPREP_FILTER { + ext.args = "--mark-duplicates " + } +} diff --git a/tests/modules/elprep/filter/test.yml b/tests/modules/elprep/filter/test.yml new file mode 100644 index 00000000..5242045b --- /dev/null +++ b/tests/modules/elprep/filter/test.yml @@ -0,0 +1,13 @@ +- name: elprep filter test_elprep_filter + command: nextflow run tests/modules/elprep/filter -entry test_elprep_filter -c tests/config/nextflow.config + tags: + - elprep + - elprep/filter + files: + - path: output/elprep/test.activity_profile.igv + - path: output/elprep/test.assembly_regions.igv + - path: output/elprep/test.bam + - path: output/elprep/test.g.vcf.gz + - path: output/elprep/test.metrics.txt + - path: output/elprep/test.recall + - path: output/elprep/versions.yml From 538dbac98ba9c8f799536cd5a617195501439457 Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Fri, 22 Apr 2022 11:26:30 +0200 Subject: [PATCH 138/283] Kaiju2table module (#1545) * Add kaiju2table module --- modules/kaiju/kaiju2table/main.nf | 40 +++++++++++++++ modules/kaiju/kaiju2table/meta.yml | 50 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/kaiju/kaiju2table/main.nf | 21 ++++++++ .../modules/kaiju/kaiju2table/nextflow.config | 5 ++ tests/modules/kaiju/kaiju2table/test.yml | 9 ++++ 6 files changed, 129 insertions(+) create mode 100644 modules/kaiju/kaiju2table/main.nf create mode 100644 modules/kaiju/kaiju2table/meta.yml create mode 100644 tests/modules/kaiju/kaiju2table/main.nf create mode 100644 tests/modules/kaiju/kaiju2table/nextflow.config create mode 100644 tests/modules/kaiju/kaiju2table/test.yml diff --git a/modules/kaiju/kaiju2table/main.nf b/modules/kaiju/kaiju2table/main.nf new file mode 100644 index 00000000..00739d1e --- /dev/null +++ b/modules/kaiju/kaiju2table/main.nf @@ -0,0 +1,40 @@ +process KAIJU_KAIJU2TABLE { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::kaiju=1.8.2" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
+ 'https://depot.galaxyproject.org/singularity/kaiju:1.8.2--h5b5514e_1': + 'quay.io/biocontainers/kaiju:1.8.2--h2e03b76_0' }" + + input: + tuple val(meta), path(results) + path db + val taxon_rank + + output: + tuple val(meta), path('*.txt'), emit: summary + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + dbnodes=`find -L ${db} -name "*nodes.dmp"` + dbname=`find -L ${db} -name "*.fmi" -not -name "._*"` + kaiju2table $args \\ + -t \$dbnodes \\ + -n \$dbname \\ + -r ${taxon_rank} \\ + -o ${prefix}.txt \\ + ${results} + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + kaiju: \$(echo \$( kaiju -h 2>&1 | sed -n 1p | sed 's/^.*Kaiju //' )) + END_VERSIONS + """ +} diff --git a/modules/kaiju/kaiju2table/meta.yml b/modules/kaiju/kaiju2table/meta.yml new file mode 100644 index 00000000..bc3e85d7 --- /dev/null +++ b/modules/kaiju/kaiju2table/meta.yml @@ -0,0 +1,50 @@ +name: "kaiju_kaiju2table" +description: Summarise Kaiju per-read classification results into a table of taxon counts at a given taxonomic rank +keywords: + - classify + - metagenomics +tools: + - kaiju: + description: Fast and sensitive taxonomic classification for metagenomics + homepage: https://kaiju.binf.ku.dk/ + documentation: https://github.com/bioinformatics-centre/kaiju/blob/master/README.md + tool_dev_url: https://github.com/bioinformatics-centre/kaiju + doi: "10.1038/ncomms11257" + licence: ["GNU GPL v3"] + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - results: + type: file + description: File containing the kaiju classification results + pattern: "*.{txt}" + - taxon_rank: + type: string + description: | + Taxonomic rank to display in report + pattern: "phylum|class|order|family|genus|species" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g.
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - results: + type: file + description: | + Summary table for a given taxonomic rank + pattern: "*.{tsv}" + +authors: + - "@sofstam" + - "@talnor" + - "@jfy133" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index c3bf04aa..a1a969e7 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1009,6 +1009,10 @@ kaiju/kaiju: - modules/kaiju/kaiju/** - tests/modules/kaiju/kaiju/** +kaiju/kaiju2table: + - modules/kaiju/kaiju2table/** + - tests/modules/kaiju/kaiju2table/** + kallisto/index: - modules/kallisto/index/** - tests/modules/kallisto/index/** diff --git a/tests/modules/kaiju/kaiju2table/main.nf b/tests/modules/kaiju/kaiju2table/main.nf new file mode 100644 index 00000000..b7169ba5 --- /dev/null +++ b/tests/modules/kaiju/kaiju2table/main.nf @@ -0,0 +1,21 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { UNTAR } from '../../../../modules/untar/main.nf' +include { KAIJU_KAIJU } from '../../../../modules/kaiju/kaiju/main.nf' +include { KAIJU_KAIJU2TABLE } from '../../../../modules/kaiju/kaiju2table/main.nf' + +workflow test_kaiju_kaiju_single_end { + + input = [ + [ id:'test', single_end:true ], // meta map + file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true) + ] + db = [ [], file(params.test_data['sarscov2']['genome']['kaiju_tar_gz'], checkIfExists: true) ] + taxon_rank = "species" + + ch_db = UNTAR ( db ) + KAIJU_KAIJU ( input, ch_db.untar.map{ it[1] } ) + KAIJU_KAIJU2TABLE ( KAIJU_KAIJU.out.results, ch_db.untar.map{ it[1] }, taxon_rank ) +} diff --git a/tests/modules/kaiju/kaiju2table/nextflow.config b/tests/modules/kaiju/kaiju2table/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/kaiju/kaiju2table/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/kaiju/kaiju2table/test.yml b/tests/modules/kaiju/kaiju2table/test.yml new file mode 100644 index 00000000..47d99c89 --- /dev/null +++ b/tests/modules/kaiju/kaiju2table/test.yml @@ -0,0 +1,9 @@ +- name: kaiju kaiju2table test_kaiju_kaiju_single_end + command: nextflow run tests/modules/kaiju/kaiju2table -entry test_kaiju_kaiju_single_end -c tests/config/nextflow.config + tags: + - kaiju + - kaiju/kaiju2table + files: + - path: output/kaiju/test.txt + md5sum: 0d9f8fd36fcf2888296ae12632c5f0a8 + - path: output/kaiju/versions.yml From 8dad38afc7f28db49c38a23deb7abfeca2ee3bc7 Mon Sep 17 00:00:00 2001 From: CMGG ICT Team Date: Fri, 22 Apr 2022 11:35:14 +0200 Subject: [PATCH 139/283] fix test.yml --- tests/modules/elprep/merge/test.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/modules/elprep/merge/test.yml b/tests/modules/elprep/merge/test.yml index 26c16f59..d6037bec 100644 --- a/tests/modules/elprep/merge/test.yml +++ b/tests/modules/elprep/merge/test.yml @@ -1,8 +1,8 @@ -- name: "elprep merge" - command: nextflow run ./tests/modules/elprep/merge -entry test_elprep_merge -c ./tests/config/nextflow.config -c ./tests/modules/elprep/merge/nextflow.config +- name: elprep merge test_elprep_merge + command: nextflow run tests/modules/elprep/merge -entry test_elprep_merge -c tests/config/nextflow.config tags: - - "elprep" - - "elprep/merge" + - elprep + - elprep/merge 
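For orientation: the elprep split (#1533) and filter (#1524) modules added earlier in this series and the merge module being adjusted in the surrounding patches are designed to chain into a scatter/gather pattern. The following is only a rough wiring sketch; the include paths, the workflow name and the all-disabled option list are assumptions for illustration, not taken from these patches.

include { ELPREP_SPLIT  } from '../../../modules/elprep/split/main.nf'   // include paths are illustrative
include { ELPREP_FILTER } from '../../../modules/elprep/filter/main.nf'
include { ELPREP_MERGE  } from '../../../modules/elprep/merge/main.nf'

workflow ELPREP_SCATTER_GATHER {
    take:
    ch_bam   // channel: [ val(meta), path(bam) ]

    main:
    ELPREP_SPLIT ( ch_bam )                              // scatter: one BAM per contig group
    ch_chunks = ELPREP_SPLIT.out.bam.transpose()         // [ meta, one chunk ] per element (assumes several chunks)
    // per-chunk filter with HaplotypeCaller and BQSR switched off; [] stands in for the unused optional files
    ELPREP_FILTER ( ch_chunks, false, false, [], [], [], [], [], [], false, false, false )
    ELPREP_MERGE ( ELPREP_FILTER.out.bam.groupTuple() )  // gather the per-chunk BAMs back per sample
    emit:
    bam = ELPREP_MERGE.out.bam
}

Tool options such as --mark-duplicates or --output-type would still be supplied through ext.args in a configuration file, as the test configs in this series do.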
files: - - path: "output/elprep/test.bam" + - path: output/elprep/test.bam - path: output/elprep/versions.yml From c4072f274cdf91601c895b206f2011e0b4b809df Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 11:39:22 +0200 Subject: [PATCH 140/283] fix formatting, fix code style --- modules/elprep/merge/main.nf | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf index 00ed4d60..2b12bfe2 100644 --- a/modules/elprep/merge/main.nf +++ b/modules/elprep/merge/main.nf @@ -18,12 +18,10 @@ process ELPREP_MERGE { task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" - if (meta.single_end) { - args += " --single-end" - } - def suffix = args.contains("--output-type sam") ? "sam" : "bam" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def suffix = args.contains("--output-type sam") ? "sam" : "bam" + def single_end = meta.single_end ? " --single-end" : "" """ # create directory and move all input so elprep can find and merge them before splitting @@ -34,6 +32,7 @@ process ELPREP_MERGE { input \\ ${prefix}.${suffix} \\ $args \\ + ${single_end} \\ --nr-of-threads $task.cpus cat <<-END_VERSIONS > versions.yml From 2f6382168cac2ca8a9ff9d53c868fbde1a0ca5e1 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 11:51:54 +0200 Subject: [PATCH 141/283] update meta.yml --- modules/elprep/merge/meta.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/elprep/merge/meta.yml b/modules/elprep/merge/meta.yml index e157fddb..1f49b1a0 100644 --- a/modules/elprep/merge/meta.yml +++ b/modules/elprep/merge/meta.yml @@ -7,10 +7,10 @@ keywords: tools: - "elprep": description: "elPrep is a high-performance tool for preparing .sam/.bam files for variant calling in sequencing pipelines. It can be used as a drop-in replacement for SAMtools/Picard/GATK4." 
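Since the merge module reads its behaviour from task.ext rather than from dedicated inputs (the patch above derives the output extension from ext.args and the output name from ext.prefix), it is driven entirely from configuration. A minimal, hypothetical configuration sketch follows; the values are examples only, not part of these patches.

process {
    withName: ELPREP_MERGE {
        ext.args   = '--output-type sam'       // example: emit SAM instead of the default BAM
        ext.prefix = { "${meta.id}.merged" }   // example naming; the module falls back to meta.id when unset
    }
}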
- homepage: "None" - documentation: "None" - tool_dev_url: "None" - doi: "" + homepage: "https://github.com/ExaScience/elprep" + documentation: "https://github.com/ExaScience/elprep" + tool_dev_url: "https://github.com/ExaScience/elprep" + doi: "10.1371/journal.pone.0244471" licence: "['AGPL v3']" input: From fefffb9bb0776e679c2297e8d80325d57e2f2d7a Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 12:36:32 +0200 Subject: [PATCH 142/283] fix output glob --- modules/elprep/merge/main.nf | 6 +++--- tests/modules/elprep/merge/test.yml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf index 2b12bfe2..28fa9985 100644 --- a/modules/elprep/merge/main.nf +++ b/modules/elprep/merge/main.nf @@ -11,8 +11,8 @@ process ELPREP_MERGE { tuple val(meta), path(bam) output: - tuple val(meta), path("**.{bam,sam}") , emit: bam - path "versions.yml" , emit: versions + tuple val(meta), path("output/**.{bam,sam}") , emit: bam + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -30,7 +30,7 @@ process ELPREP_MERGE { elprep merge \\ input \\ - ${prefix}.${suffix} \\ + output/${prefix}.${suffix} \\ $args \\ ${single_end} \\ --nr-of-threads $task.cpus diff --git a/tests/modules/elprep/merge/test.yml b/tests/modules/elprep/merge/test.yml index d6037bec..ad2ecfef 100644 --- a/tests/modules/elprep/merge/test.yml +++ b/tests/modules/elprep/merge/test.yml @@ -4,5 +4,5 @@ - elprep - elprep/merge files: - - path: output/elprep/test.bam + - path: output/elprep/output/test.bam - path: output/elprep/versions.yml From b1749445d76d12d9961e687e811af1337f0eff0f Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 12:50:17 +0200 Subject: [PATCH 143/283] fix output glob (#1551) --- modules/elprep/split/main.nf | 11 ++++++----- tests/modules/elprep/split/test.yml | 6 +++--- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/modules/elprep/split/main.nf b/modules/elprep/split/main.nf index 8af558d4..639944ef 100644 --- a/modules/elprep/split/main.nf +++ b/modules/elprep/split/main.nf @@ -11,16 +11,16 @@ process ELPREP_SPLIT { tuple val(meta), path(bam) output: - tuple val(meta), path("**.{bam,sam}"), emit: bam + tuple val(meta), path("output/**.{bam,sam}"), emit: bam path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '' - def prefix = task.ext.prefix ?: "${meta.id}" - meta.single_end ? args += " --single-end": "" + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + def single_end = meta.single_end ? " --single-end": "" """ # create directory and move all input so elprep can find and merge them before splitting @@ -31,8 +31,9 @@ process ELPREP_SPLIT { elprep split \\ input \\ - . 
\\ + output/ \\ $args \\ + $single_end \\ --nr-of-threads $task.cpus \\ --output-prefix $prefix diff --git a/tests/modules/elprep/split/test.yml b/tests/modules/elprep/split/test.yml index 7ba139b1..2de3f99b 100644 --- a/tests/modules/elprep/split/test.yml +++ b/tests/modules/elprep/split/test.yml @@ -4,7 +4,7 @@ - elprep - elprep/split files: - - path: output/elprep/splits/test-group00001.bam - - path: output/elprep/splits/test-unmapped.bam - - path: output/elprep/test-spread.bam + - path: output/elprep/output/splits/test-group00001.bam + - path: output/elprep/output/splits/test-unmapped.bam + - path: output/elprep/output/test-spread.bam - path: output/elprep/versions.yml From c17d1a7a7b473e103ddd3f28bd91b79733aa7cf2 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 13:04:41 +0200 Subject: [PATCH 144/283] fix output glob (#1552) --- modules/elprep/filter/main.nf | 4 ++-- tests/modules/elprep/filter/test.yml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/elprep/filter/main.nf b/modules/elprep/filter/main.nf index 02c93186..5ca288bc 100644 --- a/modules/elprep/filter/main.nf +++ b/modules/elprep/filter/main.nf @@ -23,7 +23,7 @@ process ELPREP_FILTER { output: - tuple val(meta), path("**.{bam,sam}") ,emit: bam + tuple val(meta), path("output/**.{bam,sam}") ,emit: bam tuple val(meta), path("*.metrics.txt") ,optional: true, emit: metrics tuple val(meta), path("*.recall") ,optional: true, emit: recall tuple val(meta), path("*.vcf.gz") ,optional: true, emit: gvcf @@ -65,7 +65,7 @@ process ELPREP_FILTER { def assembly_regions_cmd = get_assembly_regions ? " --assembly-regions ${prefix}.assembly_regions.igv": "" """ - elprep filter ${bam} ${prefix}.${suffix} \\ + elprep filter ${bam} output/${prefix}.${suffix} \\ ${reference_sequences_cmd} \\ ${filter_regions_cmd} \\ ${markdup_cmd} \\ diff --git a/tests/modules/elprep/filter/test.yml b/tests/modules/elprep/filter/test.yml index 5242045b..922d7a9b 100644 --- a/tests/modules/elprep/filter/test.yml +++ b/tests/modules/elprep/filter/test.yml @@ -6,7 +6,7 @@ files: - path: output/elprep/test.activity_profile.igv - path: output/elprep/test.assembly_regions.igv - - path: output/elprep/test.bam + - path: output/elprep/output/test.bam - path: output/elprep/test.g.vcf.gz - path: output/elprep/test.metrics.txt - path: output/elprep/test.recall From 9a2dad935cad22789380f94b67a44181e89e4392 Mon Sep 17 00:00:00 2001 From: CMGG ICT Team Date: Fri, 22 Apr 2022 13:13:39 +0200 Subject: [PATCH 145/283] update config --- tests/modules/elprep/merge/nextflow.config | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/modules/elprep/merge/nextflow.config b/tests/modules/elprep/merge/nextflow.config index 50f50a7a..4e4570f4 100644 --- a/tests/modules/elprep/merge/nextflow.config +++ b/tests/modules/elprep/merge/nextflow.config @@ -1,5 +1,5 @@ process { - - publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - -} \ No newline at end of file + withName : ELPREP_MERGE { + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + } +} From 413980e93a9cb8ba51f430d8a794cb25c43e9305 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 14:33:16 +0200 Subject: [PATCH 146/283] make use of dependent on input array size --- modules/bamtools/split/main.nf | 21 ++++++++++++--------- 1 file changed, 12 
insertions(+), 9 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index f72d13be..7d69a824 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -9,7 +9,6 @@ process BAMTOOLS_SPLIT { input: tuple val(meta), path(bam) - path(bam_list) output: tuple val(meta), path("*.bam"), emit: bam @@ -21,18 +20,22 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def input_list = bam.collect{"-in $it"}.join(' ') - if (bam_list) { - input_list += " -list $bam_list" - } - if (!args.contains("-stub")) { - args += " -stub ${prefix}" + def stub_cmd = !args.contains("-stub") : " -stub ${prefix}" : "" + + if (size(bam) > 1){ + def bamtools_merge_cmd = "bamtools merge ${input_list} |" + } else { + def split_input = input_list } + """ - bamtools merge \\ - ${input_list} \\ - | bamtools split \\ + ${bamtools_merge_cmd} \\ + bamtools split \\ + $split_input \\ + $stub_cmd \\ $args cat <<-END_VERSIONS > versions.yml From d617a50371abf87bdb376844926088c478ae3f51 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 14:39:25 +0200 Subject: [PATCH 147/283] add extra test --- tests/modules/bamtools/split/main.nf | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 9f30c4f9..2fc16d67 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -4,11 +4,21 @@ nextflow.enable.dsl = 2 include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' -workflow test_bamtools_split { +workflow test_bamtools_split_single_input { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] - BAMTOOLS_SPLIT ( input, [] ) + BAMTOOLS_SPLIT ( input ) +} + +workflow test_bamtools_split_multiple_input { + + input = [ + [ id:'test', single_end:false ], // meta map + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)] + [ + + BAMTOOLS_SPLIT ( input ) } From d293cffc6a3beb7f1ef9bab6e95ea421f622b2a5 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 14:42:43 +0200 Subject: [PATCH 148/283] update tests --- tests/modules/bamtools/split/main.nf | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 2fc16d67..ef4b9d59 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -2,15 +2,17 @@ nextflow.enable.dsl = 2 -include { BAMTOOLS_SPLIT } from '../../../../modules/bamtools/split/main.nf' +include { BAMTOOLS_SPLIT as BAMTOOLS_SPLIT_SINGLE } from '../../../../modules/bamtools/split/main.nf' +include { BAMTOOLS_SPLIT as BAMTOOLS_SPLIT_MULTIPLE } from '../../../../modules/bamtools/split/main.nf' workflow test_bamtools_split_single_input { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + ] - BAMTOOLS_SPLIT ( input ) + 
BAMTOOLS_SPLIT_SINGLE ( input ) } workflow test_bamtools_split_multiple_input { @@ -20,5 +22,5 @@ workflow test_bamtools_split_multiple_input { [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)] [ - BAMTOOLS_SPLIT ( input ) + BAMTOOLS_SPLIT_MULTIPLE ( input ) } From 7e883da90460f77b3745468452b4c549327dde13 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 14:50:50 +0200 Subject: [PATCH 149/283] Revert "make use of dependent on input array size" This reverts commit 413980e93a9cb8ba51f430d8a794cb25c43e9305. --- modules/bamtools/split/main.nf | 21 +++++++++------------ 1 file changed, 9 insertions(+), 12 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 7d69a824..f72d13be 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -9,6 +9,7 @@ process BAMTOOLS_SPLIT { input: tuple val(meta), path(bam) + path(bam_list) output: tuple val(meta), path("*.bam"), emit: bam @@ -20,22 +21,18 @@ process BAMTOOLS_SPLIT { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def input_list = bam.collect{"-in $it"}.join(' ') - def stub_cmd = !args.contains("-stub") : " -stub ${prefix}" : "" - - if (size(bam) > 1){ - def bamtools_merge_cmd = "bamtools merge ${input_list} |" - } else { - def split_input = input_list + if (bam_list) { + input_list += " -list $bam_list" + } + if (!args.contains("-stub")) { + args += " -stub ${prefix}" } - """ - ${bamtools_merge_cmd} \\ - bamtools split \\ - $split_input \\ - $stub_cmd \\ + bamtools merge \\ + ${input_list} \\ + | bamtools split \\ $args cat <<-END_VERSIONS > versions.yml From b4af3f1475b91d0c411e901d6126515babcdafec Mon Sep 17 00:00:00 2001 From: CMGG ICT Team Date: Fri, 22 Apr 2022 15:02:41 +0200 Subject: [PATCH 150/283] test fixes --- modules/bamtools/split/main.nf | 9 ++------- tests/modules/bamtools/split/main.nf | 9 ++++++--- 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index f72d13be..7184261e 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -9,7 +9,6 @@ process BAMTOOLS_SPLIT { input: tuple val(meta), path(bam) - path(bam_list) output: tuple val(meta), path("*.bam"), emit: bam @@ -22,17 +21,13 @@ process BAMTOOLS_SPLIT { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def input_list = bam.collect{"-in $it"}.join(' ') - if (bam_list) { - input_list += " -list $bam_list" - } - if (!args.contains("-stub")) { - args += " -stub ${prefix}" - } + def stub = !args.contains("-stub") ? 
"-stub ${prefix}" : "" """ bamtools merge \\ ${input_list} \\ | bamtools split \\ + $stub \\ $args cat <<-END_VERSIONS > versions.yml diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index ef4b9d59..ff0cb748 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -15,12 +15,15 @@ workflow test_bamtools_split_single_input { BAMTOOLS_SPLIT_SINGLE ( input ) } -workflow test_bamtools_split_multiple_input { +workflow test_bamtools_split_multiple { input = [ [ id:'test', single_end:false ], // meta map - [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)] - [ + [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)] + + ] BAMTOOLS_SPLIT_MULTIPLE ( input ) } + From 3bcae8e71e705f289ec3786e7b6255eb5e0d56af Mon Sep 17 00:00:00 2001 From: CMGG ICT Team Date: Fri, 22 Apr 2022 15:19:52 +0200 Subject: [PATCH 151/283] fix testing errors --- tests/modules/bamtools/split/main.nf | 2 +- tests/modules/bamtools/split/test.yml | 16 ++++++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index ff0cb748..9787b4ed 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -20,7 +20,7 @@ workflow test_bamtools_split_multiple { input = [ [ id:'test', single_end:false ], // meta map [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true)] + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)] ] diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index b52cc9ee..63ecac57 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,5 +1,5 @@ -- name: bamtools split test_bamtools_split - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split -c tests/config/nextflow.config +- name: bamtools split test_bamtools_split_single_input + command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split_single_input -c tests/config/nextflow.config tags: - bamtools - bamtools/split @@ -9,3 +9,15 @@ - path: output/bamtools/test.REF_unmapped.bam md5sum: e0754bf72c51543b2d745d96537035fb - path: output/bamtools/versions.yml + +- name: bamtools split test_bamtools_split_multiple + command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split_multiple -c tests/config/nextflow.config + tags: + - bamtools + - bamtools/split + files: + - path: output/bamtools/test.REF_chr22.bam + md5sum: 585675bea34c48ebe9db06a561d4b4fa + - path: output/bamtools/test.REF_unmapped.bam + md5sum: 16ad644c87b9471f3026bc87c98b4963 + - path: output/bamtools/versions.yml From 9aadd9a6d3f5964476582319b3a1c54a3e3fe7c9 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 22 Apr 2022 14:50:07 +0100 Subject: [PATCH 152/283] Replace vanilla Linux Biocontainer with Ubuntu (#1557) --- modules/cat/fastq/main.nf | 4 ++-- modules/gunzip/main.nf | 4 ++-- modules/untar/main.nf | 6 +++--- 3 files changed, 7 insertions(+), 7 
deletions(-) diff --git a/modules/cat/fastq/main.nf b/modules/cat/fastq/main.nf index bf0877c3..b6854895 100644 --- a/modules/cat/fastq/main.nf +++ b/modules/cat/fastq/main.nf @@ -4,8 +4,8 @@ process CAT_FASTQ { conda (params.enable_conda ? "conda-forge::sed=4.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : - 'biocontainers/biocontainers:v1.2.0_cv1' }" + 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : + 'ubuntu:20.04' }" input: tuple val(meta), path(reads, stageAs: "input*/*") diff --git a/modules/gunzip/main.nf b/modules/gunzip/main.nf index 9d4b0666..61bf1afa 100644 --- a/modules/gunzip/main.nf +++ b/modules/gunzip/main.nf @@ -4,8 +4,8 @@ process GUNZIP { conda (params.enable_conda ? "conda-forge::sed=4.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv1/biocontainers_v1.2.0_cv1.img' : - 'biocontainers/biocontainers:v1.2.0_cv1' }" + 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : + 'ubuntu:20.04' }" input: tuple val(meta), path(archive) diff --git a/modules/untar/main.nf b/modules/untar/main.nf index bbfa0bfe..058d1764 100644 --- a/modules/untar/main.nf +++ b/modules/untar/main.nf @@ -2,10 +2,10 @@ process UNTAR { tag "$archive" label 'process_low' - conda (params.enable_conda ? "conda-forge::tar=1.34" : null) + conda (params.enable_conda ? "conda-forge::sed=4.7" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://containers.biocontainers.pro/s3/SingImgsRepo/biocontainers/v1.2.0_cv2/biocontainers_v1.2.0_cv2.img' : - 'biocontainers/biocontainers:v1.2.0_cv2' }" + 'https://depot.galaxyproject.org/singularity/ubuntu:20.04' : + 'ubuntu:20.04' }" input: tuple val(meta), path(archive) From 6d88f2da8cc5d586456e801b535cc4213e0fa2f7 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Fri, 22 Apr 2022 15:13:06 +0100 Subject: [PATCH 153/283] Rename process from STRINGTIE to STRINGTIE_STRINGTIE (#1546) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Rename process from STRINGTIE to STRINGTIE_STRINGTIE * Bump Stringtie version to 2.2.1 and remove empty files in tests * Fix tests for stringtie/merge Co-authored-by: Júlia Mir Pedrol --- modules/stringtie/merge/main.nf | 6 +++--- modules/stringtie/stringtie/main.nf | 8 ++++---- modules/stringtie/stringtie/meta.yml | 2 +- tests/modules/stringtie/merge/main.nf | 12 ++++++------ tests/modules/stringtie/merge/test.yml | 14 ++++++-------- tests/modules/stringtie/stringtie/main.nf | 6 +++--- tests/modules/stringtie/stringtie/test.yml | 2 -- 7 files changed, 23 insertions(+), 27 deletions(-) diff --git a/modules/stringtie/merge/main.nf b/modules/stringtie/merge/main.nf index aa11eb36..c8460c94 100644 --- a/modules/stringtie/merge/main.nf +++ b/modules/stringtie/merge/main.nf @@ -2,10 +2,10 @@ process STRINGTIE_MERGE { label 'process_medium' // Note: 2.7X indices incompatible with AWS iGenomes. - conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) + conda (params.enable_conda ? "bioconda::stringtie=2.2.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : - 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" + 'https://depot.galaxyproject.org/singularity/stringtie:2.2.1--hecb563c_2' : + 'quay.io/biocontainers/stringtie:2.2.1--hecb563c_2' }" input: path stringtie_gtf diff --git a/modules/stringtie/stringtie/main.nf b/modules/stringtie/stringtie/main.nf index f37e347a..c70c9819 100644 --- a/modules/stringtie/stringtie/main.nf +++ b/modules/stringtie/stringtie/main.nf @@ -1,11 +1,11 @@ -process STRINGTIE { +process STRINGTIE_STRINGTIE { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::stringtie=2.1.7" : null) + conda (params.enable_conda ? "bioconda::stringtie=2.2.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/stringtie:2.1.7--h978d192_0' : - 'quay.io/biocontainers/stringtie:2.1.7--h978d192_0' }" + 'https://depot.galaxyproject.org/singularity/stringtie:2.2.1--hecb563c_2' : + 'quay.io/biocontainers/stringtie:2.2.1--hecb563c_2' }" input: tuple val(meta), path(bam) diff --git a/modules/stringtie/stringtie/meta.yml b/modules/stringtie/stringtie/meta.yml index a462c574..0dda84d0 100644 --- a/modules/stringtie/stringtie/meta.yml +++ b/modules/stringtie/stringtie/meta.yml @@ -1,4 +1,4 @@ -name: stringtie +name: stringtie_stringtie description: Transcript assembly and quantification for RNA-Se keywords: - transcript diff --git a/tests/modules/stringtie/merge/main.nf b/tests/modules/stringtie/merge/main.nf index 7851e755..3fe32902 100644 --- a/tests/modules/stringtie/merge/main.nf +++ b/tests/modules/stringtie/merge/main.nf @@ -2,8 +2,8 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' -include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' +include { STRINGTIE_STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' +include { STRINGTIE_MERGE } from '../../../../modules/stringtie/merge/main.nf' /* * Test with forward strandedness @@ -15,8 +15,8 @@ workflow test_stringtie_forward_merge { ] annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE ( input, annotation_gtf ) - STRINGTIE + STRINGTIE_STRINGTIE ( input, annotation_gtf ) + STRINGTIE_STRINGTIE .out .transcript_gtf .map { it -> it[1] } @@ -35,8 +35,8 @@ workflow test_stringtie_reverse_merge { ] annotation_gtf = file(params.test_data['homo_sapiens']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE ( input, annotation_gtf ) - STRINGTIE + STRINGTIE_STRINGTIE ( input, annotation_gtf ) + STRINGTIE_STRINGTIE .out .transcript_gtf .map { it -> it[1] } diff --git a/tests/modules/stringtie/merge/test.yml b/tests/modules/stringtie/merge/test.yml index 392a1d7c..fca66447 100644 --- a/tests/modules/stringtie/merge/test.yml +++ b/tests/modules/stringtie/merge/test.yml @@ -5,7 +5,7 @@ - stringtie/merge files: - path: output/stringtie/stringtie.merged.gtf - md5sum: 9fab7049ef2eafdea246fc787d1def40 + md5sum: d959eb2fab0db48ded7275e0a2e83c05 - path: output/stringtie/test.ballgown/e2t.ctab md5sum: 9ae42e056c955a88a883e5e917840d77 - path: output/stringtie/test.ballgown/e_data.ctab @@ -17,11 +17,10 @@ - path: output/stringtie/test.ballgown/t_data.ctab md5sum: 92a98902784e7406ffe054d2adbabc7c - path: output/stringtie/test.coverage.gtf - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/stringtie/test.gene.abundance.txt - md5sum: 
9708811bcefe0f6384293d6f419f3250 + md5sum: 8bcd8e2730ed3337e2730186dbc184f3 - path: output/stringtie/test.transcripts.gtf - md5sum: 0e42709bfe30c2c7f2574ba664f5fa9f + md5sum: a914bd55b68a4b5f607738b17861e362 - name: stringtie merge test_stringtie_reverse_merge command: nextflow run ./tests/modules/stringtie/merge -entry test_stringtie_reverse_merge -c ./tests/config/nextflow.config -c ./tests/modules/stringtie/merge/nextflow.config @@ -30,7 +29,7 @@ - stringtie/merge files: - path: output/stringtie/stringtie.merged.gtf - md5sum: afc461bb3cbc368f268a7a45c1b54497 + md5sum: 6da479298d73d5b3216d4e1576a2bdf4 - path: output/stringtie/test.ballgown/e2t.ctab md5sum: 9ae42e056c955a88a883e5e917840d77 - path: output/stringtie/test.ballgown/e_data.ctab @@ -42,8 +41,7 @@ - path: output/stringtie/test.ballgown/t_data.ctab md5sum: 92a98902784e7406ffe054d2adbabc7c - path: output/stringtie/test.coverage.gtf - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: output/stringtie/test.gene.abundance.txt - md5sum: 94b85145d60ab1b80a7f0f6cf08418b0 + md5sum: f289f41b3ba1b9f0aa05d14408f1a5da - path: output/stringtie/test.transcripts.gtf - md5sum: 3196e3d50fd461aae6408e0a70acae68 + md5sum: 9dcdc9577c0fdbb25089eda210267546 diff --git a/tests/modules/stringtie/stringtie/main.nf b/tests/modules/stringtie/stringtie/main.nf index ae6abe67..463e4b98 100644 --- a/tests/modules/stringtie/stringtie/main.nf +++ b/tests/modules/stringtie/stringtie/main.nf @@ -2,7 +2,7 @@ nextflow.enable.dsl = 2 -include { STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' +include { STRINGTIE_STRINGTIE } from '../../../../modules/stringtie/stringtie/main.nf' // // Test with forward strandedness // @@ -13,7 +13,7 @@ workflow test_stringtie_forward { ] annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE ( input, annotation_gtf ) + STRINGTIE_STRINGTIE ( input, annotation_gtf ) } // @@ -26,5 +26,5 @@ workflow test_stringtie_reverse { ] annotation_gtf = file(params.test_data['sarscov2']['genome']['genome_gtf'], checkIfExists: true) - STRINGTIE ( input, annotation_gtf ) + STRINGTIE_STRINGTIE ( input, annotation_gtf ) } diff --git a/tests/modules/stringtie/stringtie/test.yml b/tests/modules/stringtie/stringtie/test.yml index 732b9fd1..2815ba81 100644 --- a/tests/modules/stringtie/stringtie/test.yml +++ b/tests/modules/stringtie/stringtie/test.yml @@ -8,7 +8,6 @@ - path: ./output/stringtie/test.gene.abundance.txt md5sum: 7d8bce7f2a922e367cedccae7267c22e - path: ./output/stringtie/test.coverage.gtf - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: ./output/stringtie/test.ballgown/e_data.ctab md5sum: 6b4cf69bc03f3f69890f972a0e8b7471 - path: ./output/stringtie/test.ballgown/i_data.ctab @@ -30,7 +29,6 @@ - path: ./output/stringtie/test.gene.abundance.txt md5sum: 7385b870b955dae2c2ab78a70cf05cce - path: ./output/stringtie/test.coverage.gtf - md5sum: d41d8cd98f00b204e9800998ecf8427e - path: ./output/stringtie/test.ballgown/e_data.ctab md5sum: 879b6696029d19c4737b562e9d149218 - path: ./output/stringtie/test.ballgown/i_data.ctab From b3e56c83c683111d0d3edfa2821454d6132b6b66 Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Fri, 22 Apr 2022 16:29:49 +0200 Subject: [PATCH 154/283] Remove task.cpus from the command (#1559) --- modules/phantompeakqualtools/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index f584cb65..0362b2e7 100644 --- a/modules/phantompeakqualtools/main.nf +++ 
b/modules/phantompeakqualtools/main.nf @@ -26,7 +26,7 @@ process PHANTOMPEAKQUALTOOLS { def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` - Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" -p=$task.cpus + Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" cat <<-END_VERSIONS > versions.yml "${task.process}": From 99576895682ee210065be35596b28f308aba068d Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Fri, 22 Apr 2022 17:51:18 +0200 Subject: [PATCH 155/283] Add missing bacteroides_fragilis genome.gbff.gz (#1558) Co-authored-by: James A. Fellows Yates --- tests/config/test_data.config | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 559c0d6f..f6ea242d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -335,6 +335,7 @@ params { 'bacteroides_fragilis' { 'genome' { genome_fna_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.fna.gz" + genome_gbff_gz = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.gbff.gz" genome_paf = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.paf" genome_mapping_potential_arg = "${test_data_dir}/genomics/prokaryotes/bacteroides_fragilis/genome/genome.mapping.potential.ARG" From 2a2ac290c37fbbd6caa41b06993818491131e0f2 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:34:00 +0200 Subject: [PATCH 156/283] Update modules/bamtools/split/meta.yml Co-authored-by: Harshil Patel --- modules/bamtools/split/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/bamtools/split/meta.yml b/modules/bamtools/split/meta.yml index 0fd5d5ca..badc6974 100644 --- a/modules/bamtools/split/meta.yml +++ b/modules/bamtools/split/meta.yml @@ -23,7 +23,7 @@ input: e.g. 
[ id:'test', single_end:false ] - bam: type: file - description: A list of BAM files to merge and then split + description: A list of one or more BAM files to merge and then split pattern: "*.bam" - bam_list: type: file From 7bcede747411d77f85dde0c33e3599e81c0cdb14 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:34:14 +0200 Subject: [PATCH 157/283] Update modules/bamtools/split/meta.yml Co-authored-by: Harshil Patel --- modules/bamtools/split/meta.yml | 5 ----- 1 file changed, 5 deletions(-) diff --git a/modules/bamtools/split/meta.yml b/modules/bamtools/split/meta.yml index badc6974..8af701f0 100644 --- a/modules/bamtools/split/meta.yml +++ b/modules/bamtools/split/meta.yml @@ -25,11 +25,6 @@ input: type: file description: A list of one or more BAM files to merge and then split pattern: "*.bam" - - bam_list: - type: file - description: | - Optional input file containing bam files to merge before splitting, - one line per file output: - meta: From b73662c8d9b5a770904ae9f3b1a79c878f125c6c Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:34:32 +0200 Subject: [PATCH 158/283] Update tests/modules/bamtools/split/main.nf Co-authored-by: Harshil Patel --- tests/modules/bamtools/split/main.nf | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/tests/modules/bamtools/split/main.nf b/tests/modules/bamtools/split/main.nf index 9787b4ed..e5c15c32 100644 --- a/tests/modules/bamtools/split/main.nf +++ b/tests/modules/bamtools/split/main.nf @@ -19,9 +19,10 @@ workflow test_bamtools_split_multiple { input = [ [ id:'test', single_end:false ], // meta map - [file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true)] - + [ + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_sorted_bam'], checkIfExists: true) + ] ] BAMTOOLS_SPLIT_MULTIPLE ( input ) From 6efb022f6943ff67421b46359b38582a36278fe5 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:34:44 +0200 Subject: [PATCH 159/283] Update tests/modules/bamtools/split/test.yml Co-authored-by: Harshil Patel --- tests/modules/bamtools/split/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index 63ecac57..5747aa25 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -11,7 +11,7 @@ - path: output/bamtools/versions.yml - name: bamtools split test_bamtools_split_multiple - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split_multiple -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split_multiple -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config tags: - bamtools - bamtools/split From e9c44f3d85b731add02680fa6a246c0675e1aac7 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:35:34 +0200 Subject: [PATCH 160/283] Update tests/modules/bamtools/split/test.yml Co-authored-by: Harshil Patel --- tests/modules/bamtools/split/test.yml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/bamtools/split/test.yml b/tests/modules/bamtools/split/test.yml index 5747aa25..af639b43 100644 --- a/tests/modules/bamtools/split/test.yml +++ b/tests/modules/bamtools/split/test.yml @@ -1,5 +1,5 @@ - name: bamtools split test_bamtools_split_single_input - command: nextflow run tests/modules/bamtools/split -entry test_bamtools_split_single_input -c tests/config/nextflow.config + command: nextflow run ./tests/modules/bamtools/split -entry test_bamtools_split_single_input -c ./tests/config/nextflow.config -c ./tests/modules/bamtools/split/nextflow.config tags: - bamtools - bamtools/split From 2d32267fcdce945375294cb776ca129038d2b903 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:36:01 +0200 Subject: [PATCH 161/283] Update modules/bamtools/split/main.nf Co-authored-by: Harshil Patel --- modules/bamtools/split/main.nf | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 7184261e..9c643b84 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -24,11 +24,13 @@ process BAMTOOLS_SPLIT { def stub = !args.contains("-stub") ? "-stub ${prefix}" : "" """ - bamtools merge \\ - ${input_list} \\ - | bamtools split \\ - $stub \\ - $args + bamtools \\ + merge \\ + $input_list \\ + | bamtools \\ + split \\ + -stub $prefix \\ + $args cat <<-END_VERSIONS > versions.yml "${task.process}": From da56a2d50968f12ff2aa1a6d6b05fae083ba26c3 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 22 Apr 2022 19:36:08 +0200 Subject: [PATCH 162/283] Update modules/bamtools/split/main.nf Co-authored-by: Harshil Patel --- modules/bamtools/split/main.nf | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/bamtools/split/main.nf b/modules/bamtools/split/main.nf index 9c643b84..aaa5b663 100644 --- a/modules/bamtools/split/main.nf +++ b/modules/bamtools/split/main.nf @@ -21,8 +21,6 @@ process BAMTOOLS_SPLIT { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def input_list = bam.collect{"-in $it"}.join(' ') - def stub = !args.contains("-stub") ? "-stub ${prefix}" : "" - """ bamtools \\ merge \\ From 378fa5fbb47ed7c8211b01acb5cf1f66111c4e13 Mon Sep 17 00:00:00 2001 From: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> Date: Sat, 23 Apr 2022 10:12:06 -0400 Subject: [PATCH 163/283] new module: GAMMA (#1532) * initial version of gamma module * remove trailing whitespace * prettier fix * hardcode version number * Update modules/gamma/main.nf Co-authored-by: James A. Fellows Yates * Update modules/gamma/main.nf Co-authored-by: James A. Fellows Yates * Update modules/gamma/meta.yml Co-authored-by: Robert A. Petit III * update meta and prettier * add whitespaces * add fasta output and tests Co-authored-by: Robert A. Petit III Co-authored-by: James A. 
Fellows Yates --- modules/gamma/main.nf | 41 +++++++++++++++++++ modules/gamma/meta.yml | 63 +++++++++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/gamma/main.nf | 17 ++++++++ tests/modules/gamma/nextflow.config | 7 ++++ tests/modules/gamma/test.yml | 13 ++++++ 6 files changed, 145 insertions(+) create mode 100644 modules/gamma/main.nf create mode 100644 modules/gamma/meta.yml create mode 100644 tests/modules/gamma/main.nf create mode 100644 tests/modules/gamma/nextflow.config create mode 100644 tests/modules/gamma/test.yml diff --git a/modules/gamma/main.nf b/modules/gamma/main.nf new file mode 100644 index 00000000..e176ee68 --- /dev/null +++ b/modules/gamma/main.nf @@ -0,0 +1,41 @@ +def VERSION = '2.1' // Version information not provided by tool on CLI + +process GAMMA { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::gamma=2.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/gamma%3A2.1--hdfd78af_0': + 'quay.io/biocontainers/gamma:2.1--hdfd78af_0' }" + + input: + tuple val(meta), path(fasta) + path(db) + + output: + tuple val(meta), path("*.gamma") , emit: gamma + tuple val(meta), path("*.psl") , emit: psl + tuple val(meta), path("*.gff") , optional:true , emit: gff + tuple val(meta), path("*.fasta"), optional:true , emit: fasta + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + GAMMA.py \\ + $args \\ + $fasta \\ + $db \\ + $prefix + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gamma: $VERSION + END_VERSIONS + """ +} diff --git a/modules/gamma/meta.yml b/modules/gamma/meta.yml new file mode 100644 index 00000000..316b685b --- /dev/null +++ b/modules/gamma/meta.yml @@ -0,0 +1,63 @@ +name: "gamma" +description: Gene Allele Mutation Microbial Assessment +keywords: + - gamma + - gene-calling +tools: + - "gamma": + description: "Tool for Gene Allele Mutation Microbial Assessment" + homepage: "https://github.com/rastanton/GAMMA" + documentation: "https://github.com/rastanton/GAMMA" + tool_dev_url: "https://github.com/rastanton/GAMMA" + doi: "10.1093/bioinformatics/btab607" + licence: "['Apache License 2.0']" + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - fasta: + type: file + description: FASTA file + pattern: "*.{fa,fasta}" + - db: + type: file + description: Database in FASTA format + pattern: "*.{fa,fasta}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + + - gamma: + type: file + description: GAMMA file with annotated gene matches + pattern: "*.{gamma}" + + - psl: + type: file + description: PSL file with all gene matches found + pattern: "*.{psl}" + + - gff: + type: file + description: GFF file + pattern: "*.{gff}" + + - fasta: + type: file + description: multifasta file of the gene matches + pattern: "*.{fasta}" + +authors: + - "@sateeshperi" + - "@rastanton" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index a1a969e7..263e83a8 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -675,6 +675,10 @@ freebayes: - modules/freebayes/** - tests/modules/freebayes/** +gamma: + - modules/gamma/** + - tests/modules/gamma/** + gatk4/applybqsr: - modules/gatk4/applybqsr/** - tests/modules/gatk4/applybqsr/** diff --git a/tests/modules/gamma/main.nf b/tests/modules/gamma/main.nf new file mode 100644 index 00000000..f9477706 --- /dev/null +++ b/tests/modules/gamma/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { GAMMA } from '../../../modules/gamma/main.nf' + +workflow test_gamma { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + ] + + db = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + + GAMMA ( input, db ) +} diff --git a/tests/modules/gamma/nextflow.config b/tests/modules/gamma/nextflow.config new file mode 100644 index 00000000..bbbf4de0 --- /dev/null +++ b/tests/modules/gamma/nextflow.config @@ -0,0 +1,7 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + ext.args = '--fasta' + +} diff --git a/tests/modules/gamma/test.yml b/tests/modules/gamma/test.yml new file mode 100644 index 00000000..1b493b49 --- /dev/null +++ b/tests/modules/gamma/test.yml @@ -0,0 +1,13 @@ +- name: gamma test_gamma + command: nextflow run tests/modules/gamma -entry test_gamma -c tests/config/nextflow.config + tags: + - gamma + files: + - path: output/gamma/test.fasta + md5sum: df37b48466181311e0a679f3c5878484 + - path: output/gamma/test.gamma + md5sum: 3256708fa517a65ed01d99e0e3c762ae + - path: output/gamma/test.psl + md5sum: 162a2757ed3b167ae1e0cdb24213f940 + - path: output/gamma/versions.yml + md5sum: 3fefb5b46c94993362243c5f9a472057 From 569e07f0af74e2a6ea43fca61ae90bb762893461 Mon Sep 17 00:00:00 2001 From: "Maxime U. Garcia" Date: Mon, 25 Apr 2022 14:32:49 +0200 Subject: [PATCH 164/283] add samtools/bamtocram modules (#1561) * add new samtools/bamtocram module * fix md5sum * remove md5sum * Update modules/samtools/bamtocram/main.nf Co-authored-by: James A. Fellows Yates Co-authored-by: James A. 
Fellows Yates --- modules/samtools/bamtocram/main.nf | 35 +++++++++++++ modules/samtools/bamtocram/meta.yml | 52 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/samtools/bamtocram/main.nf | 17 ++++++ .../samtools/bamtocram/nextflow.config | 5 ++ tests/modules/samtools/bamtocram/test.yml | 9 ++++ 6 files changed, 122 insertions(+) create mode 100644 modules/samtools/bamtocram/main.nf create mode 100644 modules/samtools/bamtocram/meta.yml create mode 100644 tests/modules/samtools/bamtocram/main.nf create mode 100644 tests/modules/samtools/bamtocram/nextflow.config create mode 100644 tests/modules/samtools/bamtocram/test.yml diff --git a/modules/samtools/bamtocram/main.nf b/modules/samtools/bamtocram/main.nf new file mode 100644 index 00000000..b49c308f --- /dev/null +++ b/modules/samtools/bamtocram/main.nf @@ -0,0 +1,35 @@ +//There is a -L option to only output alignments in interval, might be an option for exons/panel data? +process SAMTOOLS_BAMTOCRAM { + tag "$meta.id" + label 'process_medium' + + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" + + input: + tuple val(meta), path(input), path(index) + path fasta + path fai + + output: + tuple val(meta), path("*.cram"), path("*.crai"), emit: cram_crai + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + samtools view --threads ${task.cpus} --reference ${fasta} -C $args $input > ${prefix}.cram + samtools index -@${task.cpus} ${prefix}.cram + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ +} diff --git a/modules/samtools/bamtocram/meta.yml b/modules/samtools/bamtocram/meta.yml new file mode 100644 index 00000000..037704c6 --- /dev/null +++ b/modules/samtools/bamtocram/meta.yml @@ -0,0 +1,52 @@ +name: samtools_bamtocram +description: filter/convert and then index CRAM file +keywords: + - view + - index + - bam + - cram +tools: + - samtools: + description: | + SAMtools is a set of utilities for interacting with and post-processing + short DNA sequence read alignments in the SAM, BAM and CRAM formats, written by Heng Li. + These files are generated as output by short read aligners like BWA. + homepage: http://www.htslib.org/ + documentation: hhttp://www.htslib.org/doc/samtools.html + doi: 10.1093/bioinformatics/btp352 + licence: ["MIT"] +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + - input: + type: file + description: BAM/SAM file + pattern: "*.{bam,sam}" + - index: + type: file + description: BAM/SAM index file + pattern: "*.{bai,sai}" + - fasta: + type: file + description: Reference file to create the CRAM file + pattern: "*.{fasta,fa}" +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test', single_end:false ] + - cram_crai: + type: file + description: filtered/converted CRAM file + index + pattern: "*{.cram,.crai}" + - version: + type: file + description: File containing software version + pattern: "*.{version.txt}" +authors: + - "@FriederikeHanssen" + - "@maxulysse" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 263e83a8..4d8ce0b5 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1595,6 +1595,10 @@ samtools/bam2fq: - modules/samtools/bam2fq/** - tests/modules/samtools/bam2fq/** +samtools/bamtocram: + - modules/samtools/bamtocram/** + - tests/modules/samtools/bamtocram/** + samtools/collatefastq: - modules/samtools/collatefastq/** - tests/modules/samtools/collatefastq/** diff --git a/tests/modules/samtools/bamtocram/main.nf b/tests/modules/samtools/bamtocram/main.nf new file mode 100644 index 00000000..b1743310 --- /dev/null +++ b/tests/modules/samtools/bamtocram/main.nf @@ -0,0 +1,17 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SAMTOOLS_BAMTOCRAM } from '../../../../modules/samtools/bamtocram/main.nf' + +workflow test_samtools_bamtocram { + + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true)] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + + SAMTOOLS_BAMTOCRAM ( input, fasta, fai ) +} \ No newline at end of file diff --git a/tests/modules/samtools/bamtocram/nextflow.config b/tests/modules/samtools/bamtocram/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/modules/samtools/bamtocram/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} diff --git a/tests/modules/samtools/bamtocram/test.yml b/tests/modules/samtools/bamtocram/test.yml new file mode 100644 index 00000000..3cb82902 --- /dev/null +++ b/tests/modules/samtools/bamtocram/test.yml @@ -0,0 +1,9 @@ +- name: samtools bamtocram test_samtools_bamtocram + command: nextflow run ./tests/modules/samtools/bamtocram -entry test_samtools_bamtocram -c ./tests/config/nextflow.config -c ./tests/modules/samtools/bamtocram/nextflow.config + tags: + - samtools/bamtocram + - samtools + files: + - path: output/samtools/test.cram + - path: output/samtools/test.cram.crai + - path: output/samtools/versions.yml From 6a46e7cf117bbae41bca9b4c7ff7a77794df16ec Mon Sep 17 00:00:00 2001 From: Jose Espinosa-Carrasco Date: Mon, 25 Apr 2022 19:07:43 +0200 Subject: [PATCH 165/283] Allow to pass arguments to the phantompeakqualtools script itself (#1562) * Allow to pass arguments to the script itself * Place args2 correctly * Define args2 aaaaarrrrgggg * Testing locally before commiting is a good practice * Update modules/phantompeakqualtools/main.nf Co-authored-by: Harshil Patel Co-authored-by: Harshil Patel --- modules/phantompeakqualtools/main.nf | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/modules/phantompeakqualtools/main.nf b/modules/phantompeakqualtools/main.nf index 0362b2e7..d8f73342 100644 --- a/modules/phantompeakqualtools/main.nf +++ b/modules/phantompeakqualtools/main.nf @@ -22,11 +22,12 @@ process PHANTOMPEAKQUALTOOLS { 
task.ext.when == null || task.ext.when script: - def args = task.ext.args ?: '' + def args = task.ext.args ?: '' + def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ RUN_SPP=`which run_spp.R` - Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" + Rscript $args -e "library(caTools); source(\\"\$RUN_SPP\\")" -c="$bam" -savp="${prefix}.spp.pdf" -savd="${prefix}.spp.Rdata" -out="${prefix}.spp.out" $args2 cat <<-END_VERSIONS > versions.yml "${task.process}": From 1f77bc130b26309573670c58053a928a4602a925 Mon Sep 17 00:00:00 2001 From: Sofia Stamouli <91951607+sofstam@users.noreply.github.com> Date: Tue, 26 Apr 2022 16:32:20 +0200 Subject: [PATCH 166/283] Update minimap2/align module (#1537) --- modules/minimap2/align/main.nf | 22 ++++++++++++++++------ modules/minimap2/align/meta.yml | 18 ++++++++++++++++++ tests/modules/minimap2/align/main.nf | 10 ++++++++-- tests/modules/minimap2/align/test.yml | 16 ++++++++-------- 4 files changed, 50 insertions(+), 16 deletions(-) diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index fe06f14d..7ba05ee9 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -2,18 +2,22 @@ process MINIMAP2_ALIGN { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? 'bioconda::minimap2=2.21' : null) + conda (params.enable_conda ? 'bioconda::minimap2=2.21 bioconda::samtools=1.12' : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/minimap2:2.21--h5bf99c6_0' : - 'quay.io/biocontainers/minimap2:2.21--h5bf99c6_0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-66534bcbb7031a148b13e2ad42583020b9cd25c4:1679e915ddb9d6b4abda91880c4b48857d471bd8-0' : + 'quay.io/biocontainers/mulled-v2-66534bcbb7031a148b13e2ad42583020b9cd25c4:1679e915ddb9d6b4abda91880c4b48857d471bd8-0' }" input: tuple val(meta), path(reads) path reference + val bam_format + val cigar_paf_format + val cigar_bam output: - tuple val(meta), path("*.paf"), emit: paf - path "versions.yml" , emit: versions + tuple val(meta), path("*.paf"), optional: true, emit: paf + tuple val(meta), path("*.bam"), optional: true, emit: bam + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -22,13 +26,19 @@ process MINIMAP2_ALIGN { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" + def bam_output = bam_format ? "-a | samtools sort | samtools view -@ ${task.cpus} -b -h -o ${prefix}.bam" : "-o ${prefix}.paf" + def cigar_paf = cigar_paf_format && !sam_format ? "-c" : '' + def set_cigar_bam = cigar_bam && sam_format ? "-L" : '' """ minimap2 \\ $args \\ -t $task.cpus \\ $reference \\ $input_reads \\ - > ${prefix}.paf + $cigar_paf \\ + $set_cigar_bam \\ + $bam_output + cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/minimap2/align/meta.yml b/modules/minimap2/align/meta.yml index 89e24283..991b39a0 100644 --- a/modules/minimap2/align/meta.yml +++ b/modules/minimap2/align/meta.yml @@ -29,6 +29,17 @@ input: type: file description: | Reference database in FASTA format. 
+ - bam_format: + type: boolean + description: Specify that output should be in BAM format + - cigar_paf_format: + type: boolean + description: Specify that output CIGAR should be in PAF format + - cigar_bam: + type: boolean + description: | + Write CIGAR with >65535 ops at the CG tag. This is recommended when + doing XYZ (https://github.com/lh3/minimap2#working-with-65535-cigar-operations) output: - meta: type: map @@ -39,9 +50,16 @@ output: type: file description: Alignment in PAF format pattern: "*.paf" + - bam: + type: file + description: Alignment in BAM format + pattern: "*.bam" - versions: type: file description: File containing software versions pattern: "versions.yml" authors: - "@heuermh" + - "@sofstam" + - "@sateeshperi" + - "@jfy133" diff --git a/tests/modules/minimap2/align/main.nf b/tests/modules/minimap2/align/main.nf index e507d3e5..ee6c0838 100644 --- a/tests/modules/minimap2/align/main.nf +++ b/tests/modules/minimap2/align/main.nf @@ -9,8 +9,11 @@ workflow test_minimap2_align_single_end { [ file(params.test_data['sarscov2']['illumina']['test_1_fastq_gz'], checkIfExists: true)] ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + bam_format = true + cigar_paf_format = false + cigar_bam = false - MINIMAP2_ALIGN ( input, fasta ) + MINIMAP2_ALIGN ( input, fasta, bam_format, cigar_paf_format, cigar_bam) } workflow test_minimap2_align_paired_end { @@ -19,6 +22,9 @@ workflow test_minimap2_align_paired_end { file(params.test_data['sarscov2']['illumina']['test_2_fastq_gz'], checkIfExists: true) ] ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + bam_format = true + cigar_paf_format = false + cigar_bam = false - MINIMAP2_ALIGN ( input, fasta ) + MINIMAP2_ALIGN ( input, fasta, bam_format, cigar_paf_format, cigar_bam ) } diff --git a/tests/modules/minimap2/align/test.yml b/tests/modules/minimap2/align/test.yml index 73dd73e2..c392e313 100644 --- a/tests/modules/minimap2/align/test.yml +++ b/tests/modules/minimap2/align/test.yml @@ -1,17 +1,17 @@ -- name: minimap2 align single-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_single_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config +- name: minimap2 align test_minimap2_align_single_end + command: nextflow run tests/modules/minimap2/align -entry test_minimap2_align_single_end -c tests/config/nextflow.config tags: - minimap2 - minimap2/align files: - - path: ./output/minimap2/test.paf - md5sum: 70e8cf299ee3ecd33e629d10c1f588ce + - path: output/minimap2/test.bam + - path: output/minimap2/versions.yml -- name: minimap2 align paired-end - command: nextflow run ./tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/minimap2/align/nextflow.config +- name: minimap2 align test_minimap2_align_paired_end + command: nextflow run tests/modules/minimap2/align -entry test_minimap2_align_paired_end -c tests/config/nextflow.config tags: - minimap2 - minimap2/align files: - - path: ./output/minimap2/test.paf - md5sum: 5e7b55a26bf0ea3a2843423d3e0b9a28 + - path: output/minimap2/test.bam + - path: output/minimap2/versions.yml From 134272c7ee2de794554d51d1a55ea2a4a7e3f9a0 Mon Sep 17 00:00:00 2001 From: jasmezz Date: Tue, 26 Apr 2022 17:04:57 +0200 Subject: [PATCH 167/283] Add missing container folders as output channels --- .../antismashlitedownloaddatabases/main.nf | 6 +++++- .../antismashlitedownloaddatabases/meta.yml | 21 
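For orientation, the patch above changes the MINIMAP2_ALIGN interface: the process now takes the reads tuple, the reference, and three value inputs (bam_format, cigar_paf_format, cigar_bam), and emits either a PAF file or a sorted BAM. The sketch below is a minimal usage example modelled on the test workflow included in this patch; it is not part of the patch series itself, and the include path plus the sample/genome file names are placeholders rather than files shipped with the module.

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

// Usage sketch only: paths and file names below are illustrative placeholders.
include { MINIMAP2_ALIGN } from './modules/minimap2/align/main'

workflow {
    input = [ [ id:'sample1', single_end:true ],                  // meta map
              [ file('sample1.fastq.gz', checkIfExists: true) ] ] // reads
    fasta = file('genome.fasta', checkIfExists: true)             // reference

    bam_format       = true   // true: sorted BAM via samtools; false: PAF output
    cigar_paf_format = false  // request CIGAR in the PAF output (per meta.yml)
    cigar_bam        = false  // write CIGARs with >65535 ops to the CG tag (per meta.yml)

    MINIMAP2_ALIGN ( input, fasta, bam_format, cigar_paf_format, cigar_bam )

    MINIMAP2_ALIGN.out.bam.view()
}
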
++++++++++++++++--- .../antismashlitedownloaddatabases/test.yml | 11 +++++++++- 3 files changed, 33 insertions(+), 5 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index 1853d80a..2154bafa 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -7,8 +7,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { 'quay.io/biocontainers/antismash-lite:6.0.1--pyhdfd78af_1' }" /* - These files are normally downloaded by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH. + These files are normally downloaded/created by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH. Reason: Upon execution, the tool checks if certain database files are present within the container and if not, it tries to create them in /usr/local/bin, for which only root user has write permissions. Mounting those database files with this module prevents the tool from trying to create them. + These files are also emitted as output channels in this module to enable the antismash-lite module to use them as mount volumes to the docker/singularity containers. */ containerOptions { @@ -26,6 +27,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { output: path("antismash_db") , emit: database + path("css"), emit: css_dir + path("detection"), emit: detection_dir + path("modules"), emit: modules_dir path "versions.yml", emit: versions when: diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml index ad393bae..9e95957a 100644 --- a/modules/antismash/antismashlitedownloaddatabases/meta.yml +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -27,17 +27,17 @@ input: - database_css: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by ther use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. 
pattern: "css" - database_detection: type: directory description: | - antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "detection" - database_modules: type: directory description: | - antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "modules" output: @@ -50,6 +50,21 @@ output: type: directory description: Download directory for antiSMASH databases pattern: "antismash_db" + - css_dir: + type: directory + description: | + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "css" + - detection_dir: + type: directory + description: | + antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "detection" + - modules_dir: + type: directory + description: | + antismash/modules folder which is being created during the antiSMASH database downloading step. 
These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "modules" authors: - "@jasmezz" diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index 3493bb4b..808e3b7e 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -1,14 +1,23 @@ - name: antismash antismashlitedownloaddatabases test_antismash_antismashlitedownloaddatabases command: nextflow run tests/modules/antismash/antismashlitedownloaddatabases -entry test_antismash_antismashlitedownloaddatabases -c tests/config/nextflow.config tags: - - antismash/antismashlitedownloaddatabases - antismash + - antismash/antismashlitedownloaddatabases files: - path: output/antismash/versions.yml md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 + - path: output/untar1/versions.yml + md5sum: b724089c7a3b22557626e3d6cf79884d + - path: output/untar2/versions.yml + md5sum: 7182a024e050e5fb6b8830930e551adc + - path: output/untar3/versions.yml + md5sum: d27a9b44dd969d74d237e52ac89bd8e5 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare - path: output/antismash/antismash_db/pfam - path: output/antismash/antismash_db/resfam - path: output/antismash/antismash_db/tigrfam + - path: output/antismash/css + - path: output/antismash/detection + - path: output/antismash/modules From 85ec13ff1fc2196c5a507ea497de468101baabed Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Tue, 26 Apr 2022 19:15:24 +0100 Subject: [PATCH 168/283] Add Trimgalore output for unpaired reads (#1568) * Add Trimgalore output for unpaired reads * Use glob instead of outprefix --- modules/trimgalore/main.nf | 13 ++++++++----- modules/trimgalore/meta.yml | 5 +++++ 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/modules/trimgalore/main.nf b/modules/trimgalore/main.nf index 9487c799..3a3fca90 100644 --- a/modules/trimgalore/main.nf +++ b/modules/trimgalore/main.nf @@ -11,12 +11,13 @@ process TRIMGALORE { tuple val(meta), path(reads) output: - tuple val(meta), path("*.fq.gz") , emit: reads - tuple val(meta), path("*report.txt"), emit: log - path "versions.yml" , emit: versions + tuple val(meta), path("*{trimmed,val}*.fq.gz"), emit: reads + tuple val(meta), path("*report.txt") , emit: log + path "versions.yml" , emit: versions - tuple val(meta), path("*.html"), emit: html optional true - tuple val(meta), path("*.zip") , emit: zip optional true + tuple val(meta), path("*unpaired*.fq.gz") , emit: unpaired, optional: true + tuple val(meta), path("*.html") , emit: html , optional: true + tuple val(meta), path("*.zip") , emit: zip , optional: true when: task.ext.when == null || task.ext.when @@ -52,6 +53,7 @@ process TRIMGALORE { $c_r1 \\ $tpc_r1 \\ ${prefix}.fastq.gz + cat <<-END_VERSIONS > versions.yml "${task.process}": trimgalore: \$(echo \$(trim_galore --version 2>&1) | sed 's/^.*version //; s/Last.*\$//') @@ -73,6 +75,7 @@ process TRIMGALORE { $tpc_r2 \\ ${prefix}_1.fastq.gz \\ ${prefix}_2.fastq.gz + cat <<-END_VERSIONS > versions.yml "${task.process}": trimgalore: \$(echo \$(trim_galore --version 
2>&1) | sed 's/^.*version //; s/Last.*\$//') diff --git a/modules/trimgalore/meta.yml b/modules/trimgalore/meta.yml index e99a8833..439f566d 100644 --- a/modules/trimgalore/meta.yml +++ b/modules/trimgalore/meta.yml @@ -37,6 +37,11 @@ output: List of input adapter trimmed FastQ files of size 1 and 2 for single-end and paired-end data, respectively. pattern: "*.{fq.gz}" + - unpaired: + type: file + description: | + FastQ files containing unpaired reads from read 1 or read 2 + pattern: "*unpaired*.fq.gz" - html: type: file description: FastQC report (optional) From d79433dcac78aa231f21177a3d69fde3cceb1cec Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:08:07 +0200 Subject: [PATCH 169/283] Apply suggestions from code review Co-authored-by: James A. Fellows Yates --- modules/antismash/antismashlitedownloaddatabases/meta.yml | 2 +- .../antismash/antismashlitedownloaddatabases/test.yml | 6 ------ 2 files changed, 1 insertion(+), 7 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml index 9e95957a..619dc8c2 100644 --- a/modules/antismash/antismashlitedownloaddatabases/meta.yml +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -27,7 +27,7 @@ input: - database_css: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by ther use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. 
pattern: "css" - database_detection: type: directory diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index 808e3b7e..ac38eee3 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -6,12 +6,6 @@ files: - path: output/antismash/versions.yml md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 - - path: output/untar1/versions.yml - md5sum: b724089c7a3b22557626e3d6cf79884d - - path: output/untar2/versions.yml - md5sum: 7182a024e050e5fb6b8830930e551adc - - path: output/untar3/versions.yml - md5sum: d27a9b44dd969d74d237e52ac89bd8e5 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare From 31547edc1e2491c16c00ffff4ed34de193f72b65 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Wed, 27 Apr 2022 10:14:14 +0200 Subject: [PATCH 170/283] Update tool name --- modules/antismash/antismashlitedownloaddatabases/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index 2154bafa..72314eee 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -44,7 +44,7 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { cat <<-END_VERSIONS > versions.yml "${task.process}": - antismash: \$(antismash --version | sed 's/antiSMASH //') + antismash-lite: \$(antismash --version | sed 's/antiSMASH //') END_VERSIONS """ } From 024c992ca77d6bf5e4baddae30a38cd14d526042 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 27 Apr 2022 11:12:58 +0200 Subject: [PATCH 171/283] created the module --- modules/vardictjava/main.nf | 77 +++++++++++++++++++++++ modules/vardictjava/meta.yml | 51 +++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/vardictjava/main.nf | 15 +++++ tests/modules/vardictjava/nextflow.config | 8 +++ tests/modules/vardictjava/test.yml | 12 ++++ 6 files changed, 167 insertions(+) create mode 100644 modules/vardictjava/main.nf create mode 100644 modules/vardictjava/meta.yml create mode 100644 tests/modules/vardictjava/main.nf create mode 100644 tests/modules/vardictjava/nextflow.config create mode 100644 tests/modules/vardictjava/test.yml diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf new file mode 100644 index 00000000..d52e15dd --- /dev/null +++ b/modules/vardictjava/main.nf @@ -0,0 +1,77 @@ +// TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :) +// https://github.com/nf-core/modules/tree/master/modules +// You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace: +// https://nf-co.re/join +// TODO nf-core: A module file SHOULD only define input and output files as command-line parameters. +// All other parameters MUST be provided using the "task.ext" directive, see here: +// https://www.nextflow.io/docs/latest/process.html#ext +// where "task.ext" is a string. +// Any parameters that need to be evaluated in the context of a particular sample +// e.g. single-end/paired-end data MUST also be defined and evaluated appropriately. 
+// TODO nf-core: Software that can be piped together SHOULD be added to separate module files +// unless there is a run-time, storage advantage in implementing in this way +// e.g. it's ok to have a single module for bwa to output BAM instead of SAM: +// bwa mem | samtools view -B -T ref.fasta +// TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty +// list (`[]`) instead of a file can be used to work around this issue. + +process VARDICTJAVA { + tag "$meta.id" + label 'process_medium' + + // TODO nf-core: List required Conda package(s). + // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). + // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. + // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. + conda (params.enable_conda ? "bioconda::vardict-java=1.8.3" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/vardict-java:1.8.3--hdfd78af_0': + 'quay.io/biocontainers/vardict-java:1.8.3--hdfd78af_0' }" + + input: + // TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group" + // MUST be provided as an input via a Groovy Map called "meta". + // This information may not be required in some instances e.g. indexing reference genome files: + // https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf + // TODO nf-core: Where applicable please provide/convert compressed files as input/output + // e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc. + tuple val(meta), path(bam) + path(reference_fasta) + path(regions_of_interest) + + output: + // TODO nf-core: Named file extensions MUST be emitted for ALL output channels + tuple val(meta), path("*.bam"), emit: bam + // TODO nf-core: List additional required output channels/values here + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + // TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10 + // If the software is unable to output a version number on the command-line then it can be manually specified + // e.g. https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf + // Each software used MUST provide the software name and version number in the YAML version file (versions.yml) + // TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "task.ext.args" directive + // TODO nf-core: If the tool supports multi-threading then you MUST provide the appropriate parameter + // using the Nextflow "task" variable e.g. "--threads $task.cpus" + // TODO nf-core: Please replace the example samtools command below with your module's command + // TODO nf-core: Please indent the command appropriately (4 spaces!!) 
to help with readability ;) + """ + vardict-java \\ + $args \\ + -b $bam \\ + -th $task.cpus \\ + -n $prefix \\ + -G $reference_fasta \\ + $regions_of_interest \\ + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + vardictjava: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + END_VERSIONS + """ +} diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml new file mode 100644 index 00000000..e8c55de3 --- /dev/null +++ b/modules/vardictjava/meta.yml @@ -0,0 +1,51 @@ +name: "vardictjava" +## TODO nf-core: Add a description of the module and list keywords +description: write your description here +keywords: + - sort +tools: + - "vardictjava": + ## TODO nf-core: Add a description and other details for the software below + description: "Java port of the VarDict variant discovery program" + homepage: "None" + documentation: "None" + tool_dev_url: "None" + doi: "" + licence: "['MIT']" + +## TODO nf-core: Add a description of all of the variables used as input +input: + # Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + # + ## TODO nf-core: Delete / customise this example input + - bam: + type: file + description: BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +## TODO nf-core: Add a description of all of the variables used as output +output: + #Only when we have meta + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test', single_end:false ] + # + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + ## TODO nf-core: Delete / customise this example output + - bam: + type: file + description: Sorted BAM/CRAM/SAM file + pattern: "*.{bam,cram,sam}" + +authors: + - "@nvnieuwk" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4d8ce0b5..8ca9f2e2 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1908,6 +1908,10 @@ unzip: - modules/unzip/** - tests/modules/unzip/** +vardictjava: + - modules/vardictjava/** + - tests/modules/vardictjava/** + variantbam: - modules/variantbam/** - tests/modules/variantbam/** diff --git a/tests/modules/vardictjava/main.nf b/tests/modules/vardictjava/main.nf new file mode 100644 index 00000000..8a714b2b --- /dev/null +++ b/tests/modules/vardictjava/main.nf @@ -0,0 +1,15 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { VARDICTJAVA } from '../../../modules/vardictjava/main.nf' + +workflow test_vardictjava { + + input = [ + [ id:'test', single_end:false ], // meta map + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + VARDICTJAVA ( input ) +} diff --git a/tests/modules/vardictjava/nextflow.config b/tests/modules/vardictjava/nextflow.config new file mode 100644 index 00000000..5dc176a9 --- /dev/null +++ b/tests/modules/vardictjava/nextflow.config @@ -0,0 +1,8 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + + withName: VARDICTJAVA { + ext.args = '' + } +} \ No newline at end of file diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml new file mode 100644 index 00000000..73c647af --- /dev/null +++ b/tests/modules/vardictjava/test.yml @@ -0,0 +1,12 @@ +## TODO nf-core: Please run the following command to build this file: +# nf-core modules create-test-yml vardictjava +- name: "vardictjava" + 
command: nextflow run ./tests/modules/vardictjava -entry test_vardictjava -c ./tests/config/nextflow.config -c ./tests/modules/vardictjava/nextflow.config + tags: + - "vardictjava" + # + files: + - path: "output/vardictjava/test.bam" + md5sum: e667c7caad0bc4b7ac383fd023c654fc + - path: output/vardictjava/versions.yml + md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b From d57664b576d2433deed1b0fb742f3a3da05d5046 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Wed, 27 Apr 2022 11:18:58 +0200 Subject: [PATCH 172/283] cleanup TODO's --- modules/vardictjava/main.nf | 39 +------------------------------------ 1 file changed, 1 insertion(+), 38 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index d52e15dd..80beb540 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -1,48 +1,19 @@ -// TODO nf-core: If in doubt look at other nf-core/modules to see how we are doing things! :) -// https://github.com/nf-core/modules/tree/master/modules -// You can also ask for help via your pull request or on the #modules channel on the nf-core Slack workspace: -// https://nf-co.re/join -// TODO nf-core: A module file SHOULD only define input and output files as command-line parameters. -// All other parameters MUST be provided using the "task.ext" directive, see here: -// https://www.nextflow.io/docs/latest/process.html#ext -// where "task.ext" is a string. -// Any parameters that need to be evaluated in the context of a particular sample -// e.g. single-end/paired-end data MUST also be defined and evaluated appropriately. -// TODO nf-core: Software that can be piped together SHOULD be added to separate module files -// unless there is a run-time, storage advantage in implementing in this way -// e.g. it's ok to have a single module for bwa to output BAM instead of SAM: -// bwa mem | samtools view -B -T ref.fasta -// TODO nf-core: Optional inputs are not currently supported by Nextflow. However, using an empty -// list (`[]`) instead of a file can be used to work around this issue. - process VARDICTJAVA { tag "$meta.id" label 'process_medium' - // TODO nf-core: List required Conda package(s). - // Software MUST be pinned to channel (i.e. "bioconda"), version (i.e. "1.10"). - // For Conda, the build (i.e. "h9402c20_2") must be EXCLUDED to support installation on different operating systems. - // TODO nf-core: See section in main README for further information regarding finding and adding container addresses to the section below. conda (params.enable_conda ? "bioconda::vardict-java=1.8.3" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 'https://depot.galaxyproject.org/singularity/vardict-java:1.8.3--hdfd78af_0': 'quay.io/biocontainers/vardict-java:1.8.3--hdfd78af_0' }" input: - // TODO nf-core: Where applicable all sample-specific information e.g. "id", "single_end", "read_group" - // MUST be provided as an input via a Groovy Map called "meta". - // This information may not be required in some instances e.g. indexing reference genome files: - // https://github.com/nf-core/modules/blob/master/modules/bwa/index/main.nf - // TODO nf-core: Where applicable please provide/convert compressed files as input/output - // e.g. "*.fastq.gz" and NOT "*.fastq", "*.bam" and NOT "*.sam" etc. 
tuple val(meta), path(bam) path(reference_fasta) path(regions_of_interest) output: - // TODO nf-core: Named file extensions MUST be emitted for ALL output channels tuple val(meta), path("*.bam"), emit: bam - // TODO nf-core: List additional required output channels/values here path "versions.yml" , emit: versions when: @@ -51,15 +22,7 @@ process VARDICTJAVA { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - // TODO nf-core: Where possible, a command MUST be provided to obtain the version number of the software e.g. 1.10 - // If the software is unable to output a version number on the command-line then it can be manually specified - // e.g. https://github.com/nf-core/modules/blob/master/modules/homer/annotatepeaks/main.nf - // Each software used MUST provide the software name and version number in the YAML version file (versions.yml) - // TODO nf-core: It MUST be possible to pass additional parameters to the tool as a command-line string via the "task.ext.args" directive - // TODO nf-core: If the tool supports multi-threading then you MUST provide the appropriate parameter - // using the Nextflow "task" variable e.g. "--threads $task.cpus" - // TODO nf-core: Please replace the example samtools command below with your module's command - // TODO nf-core: Please indent the command appropriately (4 spaces!!) to help with readability ;) + """ vardict-java \\ $args \\ From 9108e2e2ec337978929cafc8d7cfedb656124786 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Wed, 27 Apr 2022 12:22:05 +0200 Subject: [PATCH 173/283] Update test output checksum --- tests/modules/antismash/antismashlitedownloaddatabases/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index ac38eee3..1079363c 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -5,7 +5,7 @@ - antismash/antismashlitedownloaddatabases files: - path: output/antismash/versions.yml - md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 + md5sum: 24859c67023abab99de295d3675a24b6 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare From 61e740f79cbb4538483e751ad82ca71c592f3079 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Wed, 27 Apr 2022 12:40:04 +0200 Subject: [PATCH 174/283] Add antismash download DB: missing container folders as output channels (#1567) * Add missing container folders as output channels * Apply suggestions from code review Co-authored-by: James A. Fellows Yates * Update tool name * Update test output checksum Co-authored-by: James A. 
Fellows Yates --- .../antismashlitedownloaddatabases/main.nf | 8 +++++-- .../antismashlitedownloaddatabases/meta.yml | 21 ++++++++++++++++--- .../antismashlitedownloaddatabases/test.yml | 7 +++++-- 3 files changed, 29 insertions(+), 7 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index 1853d80a..72314eee 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -7,8 +7,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { 'quay.io/biocontainers/antismash-lite:6.0.1--pyhdfd78af_1' }" /* - These files are normally downloaded by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH. + These files are normally downloaded/created by download-antismash-databases itself, and must be retrieved for input by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. This is solely for use for CI tests of the nf-core/module version of antiSMASH. Reason: Upon execution, the tool checks if certain database files are present within the container and if not, it tries to create them in /usr/local/bin, for which only root user has write permissions. Mounting those database files with this module prevents the tool from trying to create them. + These files are also emitted as output channels in this module to enable the antismash-lite module to use them as mount volumes to the docker/singularity containers. */ containerOptions { @@ -26,6 +27,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { output: path("antismash_db") , emit: database + path("css"), emit: css_dir + path("detection"), emit: detection_dir + path("modules"), emit: modules_dir path "versions.yml", emit: versions when: @@ -40,7 +44,7 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { cat <<-END_VERSIONS > versions.yml "${task.process}": - antismash: \$(antismash --version | sed 's/antiSMASH //') + antismash-lite: \$(antismash --version | sed 's/antiSMASH //') END_VERSIONS """ } diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml index ad393bae..619dc8c2 100644 --- a/modules/antismash/antismashlitedownloaddatabases/meta.yml +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -27,17 +27,17 @@ input: - database_css: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. 
These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "css" - database_detection: type: directory description: | - antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "detection" - database_modules: type: directory description: | - antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "modules" output: @@ -50,6 +50,21 @@ output: type: directory description: Download directory for antiSMASH databases pattern: "antismash_db" + - css_dir: + type: directory + description: | + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "css" + - detection_dir: + type: directory + description: | + antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. 
Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "detection" + - modules_dir: + type: directory + description: | + antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + pattern: "modules" authors: - "@jasmezz" diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index 3493bb4b..1079363c 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -1,14 +1,17 @@ - name: antismash antismashlitedownloaddatabases test_antismash_antismashlitedownloaddatabases command: nextflow run tests/modules/antismash/antismashlitedownloaddatabases -entry test_antismash_antismashlitedownloaddatabases -c tests/config/nextflow.config tags: - - antismash/antismashlitedownloaddatabases - antismash + - antismash/antismashlitedownloaddatabases files: - path: output/antismash/versions.yml - md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 + md5sum: 24859c67023abab99de295d3675a24b6 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare - path: output/antismash/antismash_db/pfam - path: output/antismash/antismash_db/resfam - path: output/antismash/antismash_db/tigrfam + - path: output/antismash/css + - path: output/antismash/detection + - path: output/antismash/modules From 7956d38e61a8efdd2fffcb6cf33f4a02ad73bc50 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Wed, 27 Apr 2022 13:18:46 +0200 Subject: [PATCH 175/283] Fixed the test on nextflow level --- modules/vardictjava/main.nf | 4 +-- modules/vardictjava/meta.yml | 46 ++++++++++++++++++------------ tests/modules/vardictjava/main.nf | 8 ++++-- tests/modules/vardictjava/test.yml | 2 +- 4 files changed, 36 insertions(+), 24 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 80beb540..8170bf76 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -13,7 +13,7 @@ process VARDICTJAVA { path(regions_of_interest) output: - tuple val(meta), path("*.bam"), emit: bam + tuple val(meta), path("*.vcf"), emit: vcf path "versions.yml" , emit: versions when: @@ -34,7 +34,7 @@ process VARDICTJAVA { cat <<-END_VERSIONS > versions.yml "${task.process}": - vardictjava: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//' )) + vardictjava: \$(echo 1.8.3) END_VERSIONS """ } diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index e8c55de3..7820a877 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -1,19 +1,19 @@ name: "vardictjava" -## TODO nf-core: Add a description of the module and list keywords -description: write your description here + +description: The Java port of the VarDict variant caller keywords: - - sort + - variant calling + - VarDict + - AstraZeneca tools: - "vardictjava": - ## TODO nf-core: Add a description and other details for the 
software below description: "Java port of the VarDict variant discovery program" - homepage: "None" - documentation: "None" - tool_dev_url: "None" - doi: "" + homepage: "https://github.com/AstraZeneca-NGS/VarDictJava" + documentation: "https://github.com/AstraZeneca-NGS/VarDictJava" + tool_dev_url: "https://github.com/AstraZeneca-NGS/VarDictJava" + doi: "10.1093/nar/gkw227 " licence: "['MIT']" -## TODO nf-core: Add a description of all of the variables used as input input: # Only when we have meta - meta: @@ -21,14 +21,22 @@ input: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - # - ## TODO nf-core: Delete / customise this example input + - bam: type: file - description: BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + description: BAM/SAM file + pattern: "*.{bam,sam}" + + - reference_fasta: + type: file + description: FASTA of the reference genome + pattern: "*.{fa,fasta}" + + - regions_of_interest: + type: file + description: BED with the regions of interest + pattern: "*.bed" -## TODO nf-core: Add a description of all of the variables used as output output: #Only when we have meta - meta: @@ -36,16 +44,16 @@ output: description: | Groovy Map containing sample information e.g. [ id:'test', single_end:false ] - # + - versions: type: file description: File containing software versions pattern: "versions.yml" - ## TODO nf-core: Delete / customise this example output - - bam: + + - vcf: type: file - description: Sorted BAM/CRAM/SAM file - pattern: "*.{bam,cram,sam}" + description: VCF file output + pattern: "*.vcf" authors: - "@nvnieuwk" diff --git a/tests/modules/vardictjava/main.nf b/tests/modules/vardictjava/main.nf index 8a714b2b..7511b529 100644 --- a/tests/modules/vardictjava/main.nf +++ b/tests/modules/vardictjava/main.nf @@ -8,8 +8,12 @@ workflow test_vardictjava { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] - VARDICTJAVA ( input ) + VARDICTJAVA ( + input, + file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + ) } diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 73c647af..001a56e8 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -6,7 +6,7 @@ - "vardictjava" # files: - - path: "output/vardictjava/test.bam" + - path: "output/vardictjava/test.vcf" md5sum: e667c7caad0bc4b7ac383fd023c654fc - path: output/vardictjava/versions.yml md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b From 10ca39a86f1b227a5535238d02398fec686eba72 Mon Sep 17 00:00:00 2001 From: "Maxime U. 
Garcia" Date: Wed, 27 Apr 2022 16:03:44 +0200 Subject: [PATCH 176/283] add intervals possibilities to splitncigarreads (#1571) --- modules/gatk4/splitncigarreads/main.nf | 4 +++- modules/gatk4/splitncigarreads/meta.yml | 7 +++++++ tests/modules/gatk4/splitncigarreads/main.nf | 18 +++++++++++++++++- tests/modules/gatk4/splitncigarreads/test.yml | 11 ++++++++++- 4 files changed, 37 insertions(+), 3 deletions(-) diff --git a/modules/gatk4/splitncigarreads/main.nf b/modules/gatk4/splitncigarreads/main.nf index f7c559d9..85e5daa8 100644 --- a/modules/gatk4/splitncigarreads/main.nf +++ b/modules/gatk4/splitncigarreads/main.nf @@ -8,7 +8,7 @@ process GATK4_SPLITNCIGARREADS { 'quay.io/biocontainers/gatk4:4.2.5.0--hdfd78af_0' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(bai), path(intervals) path fasta path fai path dict @@ -23,6 +23,7 @@ process GATK4_SPLITNCIGARREADS { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def interval_command = intervals ? "--intervals $intervals" : "" def avail_mem = 3 if (!task.memory) { @@ -35,6 +36,7 @@ process GATK4_SPLITNCIGARREADS { --input $bam \\ --output ${prefix}.bam \\ --reference $fasta \\ + $interval_command \\ --tmp-dir . \\ $args diff --git a/modules/gatk4/splitncigarreads/meta.yml b/modules/gatk4/splitncigarreads/meta.yml index 407e80bd..76bfdcd3 100644 --- a/modules/gatk4/splitncigarreads/meta.yml +++ b/modules/gatk4/splitncigarreads/meta.yml @@ -23,6 +23,13 @@ input: type: list description: BAM/SAM/CRAM file containing reads pattern: "*.{bam,sam,cram}" + - bai: + type: list + description: BAI/SAI/CRAI index file (optional) + pattern: "*.{bai,sai,crai}" + - intervals: + type: file + description: Bed file with the genomic regions included in the library (optional) - fasta: type: file description: The reference fasta file diff --git a/tests/modules/gatk4/splitncigarreads/main.nf b/tests/modules/gatk4/splitncigarreads/main.nf index 7e5b7c9a..31e45cec 100644 --- a/tests/modules/gatk4/splitncigarreads/main.nf +++ b/tests/modules/gatk4/splitncigarreads/main.nf @@ -6,7 +6,23 @@ include { GATK4_SPLITNCIGARREADS } from '../../../../modules/gatk4/splitncigarre workflow test_gatk4_splitncigarreads { input = [ [ id:'test' ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [], + [] + ] + + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + + GATK4_SPLITNCIGARREADS ( input, fasta, fai, dict ) +} + +workflow test_gatk4_splitncigarreads_intervals { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true), + file(params.test_data['sarscov2']['genome']['test_bed'], checkIfExists: true) ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) diff --git a/tests/modules/gatk4/splitncigarreads/test.yml b/tests/modules/gatk4/splitncigarreads/test.yml index c38064e2..d9a58901 100644 --- 
a/tests/modules/gatk4/splitncigarreads/test.yml +++ b/tests/modules/gatk4/splitncigarreads/test.yml @@ -5,5 +5,14 @@ - gatk4/splitncigarreads files: - path: output/gatk4/test.bam - md5sum: ceed15c0bd64ff5c38d3816905933b0b + md5sum: 436d8e31285c6b588bdd1c7f1d07f6f2 + - path: output/gatk4/versions.yml +- name: gatk4 splitncigarreads test_gatk4_splitncigarreads_intervals + command: nextflow run tests/modules/gatk4/splitncigarreads -entry test_gatk4_splitncigarreads_intervals -c tests/config/nextflow.config + tags: + - gatk4 + - gatk4/splitncigarreads + files: + - path: output/gatk4/test.bam + md5sum: cd56e3225950f519fd47164cca60a0bb - path: output/gatk4/versions.yml From 213403187932dbbdd936a04474cc8cd8abae7a08 Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 27 Apr 2022 18:15:11 +0100 Subject: [PATCH 177/283] Bump SAMtools version for custom/getchromsizes (#1572) --- modules/custom/getchromsizes/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/custom/getchromsizes/main.nf b/modules/custom/getchromsizes/main.nf index bbcfa9be..0eabf3a4 100644 --- a/modules/custom/getchromsizes/main.nf +++ b/modules/custom/getchromsizes/main.nf @@ -2,10 +2,10 @@ process CUSTOM_GETCHROMSIZES { tag "$fasta" label 'process_low' - conda (params.enable_conda ? "bioconda::samtools=1.15" : null) + conda (params.enable_conda ? "bioconda::samtools=1.15.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/samtools:1.15--h1170115_1' : - 'quay.io/biocontainers/samtools:1.15--h1170115_1' }" + 'https://depot.galaxyproject.org/singularity/samtools:1.15.1--h1170115_0' : + 'quay.io/biocontainers/samtools:1.15.1--h1170115_0' }" input: path fasta From 1b5d3f5ac2ae61ee35dece20a2aeb8018b6438ce Mon Sep 17 00:00:00 2001 From: Harshil Patel Date: Wed, 27 Apr 2022 19:21:26 +0100 Subject: [PATCH 178/283] Bump STAR version to 2.7.10a for RSEM modules (#1573) * Bump STAR version to 2.7.10a for RSEM modules * Fix tests --- modules/rsem/calculateexpression/main.nf | 6 +++--- modules/rsem/preparereference/main.nf | 6 +++--- tests/config/test_data.config | 4 ++-- tests/modules/rsem/calculateexpression/test.yml | 4 ++-- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/modules/rsem/calculateexpression/main.nf b/modules/rsem/calculateexpression/main.nf index cf147a63..1ab3a635 100644 --- a/modules/rsem/calculateexpression/main.nf +++ b/modules/rsem/calculateexpression/main.nf @@ -2,10 +2,10 @@ process RSEM_CALCULATEEXPRESSION { tag "$meta.id" label 'process_high' - conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) + conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.10a" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : - 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' }" input: tuple val(meta), path(reads) diff --git a/modules/rsem/preparereference/main.nf b/modules/rsem/preparereference/main.nf index 2d2ca205..da11be45 100644 --- a/modules/rsem/preparereference/main.nf +++ b/modules/rsem/preparereference/main.nf @@ -2,10 +2,10 @@ process RSEM_PREPAREREFERENCE { tag "$fasta" label 'process_high' - conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.6a" : null) + conda (params.enable_conda ? "bioconda::rsem=1.3.3 bioconda::star=2.7.10a" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' : - 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:606b713ec440e799d53a2b51a6e79dbfd28ecf3e-0' }" + 'https://depot.galaxyproject.org/singularity/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' : + 'quay.io/biocontainers/mulled-v2-cf0123ef83b3c38c13e3b0696a3f285d3f20f15b:64aad4a4e144878400649e71f42105311be7ed87-0' }" input: path fasta, stageAs: "rsem/*" diff --git a/tests/config/test_data.config b/tests/config/test_data.config index f6ea242d..b3171a51 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -245,8 +245,8 @@ params { test2_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2_2.fastq.gz" test2_umi_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2.umi_1.fastq.gz" test2_umi_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test2.umi_2.fastq.gz" - test_rnaseq_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test.rnaseq_1.fastq.gz" - test_rnaseq_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test.rnaseq_2.fastq.gz" + test_rnaseq_1_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_rnaseq_1.fastq.gz" + test_rnaseq_2_fastq_gz = "${test_data_dir}/genomics/homo_sapiens/illumina/fastq/test_rnaseq_2.fastq.gz" test_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test.baserecalibrator.table" test2_baserecalibrator_table = "${test_data_dir}/genomics/homo_sapiens/illumina/gatk/test2.baserecalibrator.table" diff --git a/tests/modules/rsem/calculateexpression/test.yml b/tests/modules/rsem/calculateexpression/test.yml index f19c3398..b0251de9 100644 --- a/tests/modules/rsem/calculateexpression/test.yml +++ b/tests/modules/rsem/calculateexpression/test.yml @@ -42,7 +42,7 @@ - path: output/rsem/rsem/genome.transcripts.fa md5sum: 050c521a2719c2ae48267c1e65218f29 - path: output/rsem/rsem/genomeParameters.txt - md5sum: 2fe3a030e1706c3e8cd4df3818e6dd2f + md5sum: df5a456e3242520cc36e0083a6a7d9dd - path: output/rsem/rsem/sjdbInfo.txt md5sum: 5690ea9d9f09f7ff85b7fd47bd234903 - path: output/rsem/rsem/sjdbList.fromGTF.out.tab @@ -63,4 +63,4 @@ - path: output/rsem/test.stat/test.theta md5sum: 
de2e4490c98cc5383a86ae8225fd0a28 - path: output/rsem/test.transcript.bam - md5sum: 7846491086c478858419667d60f18edd + md5sum: ed681d39f5700ffc74d6321525330d93 From 839ee59ca1646d055dced78125d88d8a864cf651 Mon Sep 17 00:00:00 2001 From: jvhagey Date: Wed, 27 Apr 2022 18:01:28 -0400 Subject: [PATCH 179/283] adding srst2 module --- modules/srst2/srst2/main.nf | 47 ++++++++++++++++ modules/srst2/srst2/meta.yml | 68 +++++++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/modules/srst2/srst2/main.nf | 28 ++++++++++ tests/modules/srst2/srst2/nextflow.config | 5 ++ tests/modules/srst2/srst2/test.yml | 29 ++++++++++ 6 files changed, 181 insertions(+) create mode 100644 modules/srst2/srst2/main.nf create mode 100644 modules/srst2/srst2/meta.yml create mode 100644 tests/modules/srst2/srst2/main.nf create mode 100644 tests/modules/srst2/srst2/nextflow.config create mode 100644 tests/modules/srst2/srst2/test.yml diff --git a/modules/srst2/srst2/main.nf b/modules/srst2/srst2/main.nf new file mode 100644 index 00000000..fc665fad --- /dev/null +++ b/modules/srst2/srst2/main.nf @@ -0,0 +1,47 @@ +process SRST2_SRST2 { + tag "${meta.id}" + label 'process_low' + + conda (params.enable_conda ? "bioconda::srst2=0.2.0" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/srst2%3A0.2.0--py_4': + 'quay.io/biocontainers/srst2:0.2.0--py_4'}" + + input: + tuple val(meta), path(fastq_s), path(db) + + output: + tuple val(meta), path("*_genes_*_results.txt") , emit: gene_results + tuple val(meta), path("*_fullgenes_*_results.txt") , optional:true, emit: fullgene_results + tuple val(meta), path("*.pileup") , emit: pileup + tuple val(meta), path("*.sorted.bam") , emit: sorted_bam + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: "" + def prefix = task.ext.prefix ?: "${meta.id}" + def read_s = meta.single_end ? "--input_se ${fastq_s}" : "--input_pe ${fastq_s[0]} ${fastq_s[1]}" + if (meta.db=="gene") { + database = "--gene_db ${db}" + } else if (meta.db=="mlst") { + database = "--mlst_db ${db}" + } else { + return + } + """ + srst2 \\ + ${read_s} \\ + --threads $task.cpus \\ + --output ${prefix} \\ + ${database} \\ + $args + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + srst2: \$(echo \$(srst2 --version 2>&1) | sed 's/srst2 //' )) + END_VERSIONS + """ +} diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml new file mode 100644 index 00000000..2c44f53d --- /dev/null +++ b/modules/srst2/srst2/meta.yml @@ -0,0 +1,68 @@ +name: srst2_srst2 +description: | + Short Read Sequence Typing for Bacterial Pathogens is a program designed to take Illumina sequence data, + a MLST database and/or a database of gene sequences (e.g. resistance genes, virulence genes, etc) + and report the presence of STs and/or reference genes. 
+keywords: + - mlst + - typing + - illumina +tools: + - srst2: + description: "Short Read Sequence Typing for Bacterial Pathogens" + homepage: {http://katholt.github.io/srst2/} + documentation: {https://github.com/katholt/srst2/blob/master/README.md} + tool_dev_url: {https://github.com/katholt/srst2} + doi: "" + licence: ['BSD'] + +input: + - meta: + type: map0.2.0-4 + description: | + Groovy Map containing sample information + id: should be the identification number or sample name + single_end: should be true for single end data and false for paired in data + db: should be either 'gene' to use the --gene_db option or "mlst" to use the --mlst_db option + e.g. [ id:'sample', single_end:false , db:'gene'] + - fasta: + type: file + description: | + gzipped fasta file. If files are NOT in + MiSeq format sample_S1_L001_R1_001.fastq.gz uses --forward and --reverse parameters; otherwise + default is _1, i.e. expect forward reads as sample_1.fastq.gz). + pattern: "*.fastq.gz" + - db: + type: file + description: Database in FASTA format + pattern: "*.fasta" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'sample', single_end:false ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - txt: + type: file + description: results text file + pattern: "*_fullgenes_*_results.txt" + - txt: + type: file + description: results text file + pattern: "*_genes_*_results.txt" + - bam: + type: file + description: Sorted BAM file + pattern: "*.sorted.bam" + - pileup: + type: file + description: SAMtools pileup file + pattern: "*.pileup" + +authors: + - "@jvhagey" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4d8ce0b5..66f50903 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1767,6 +1767,10 @@ sratools/prefetch: - modules/sratools/prefetch/** - tests/modules/sratools/prefetch/** +srst2/srst2: + - modules/srst2/srst2/** + - tests/modules/srst2/srst2/** + ssuissero: - modules/ssuissero/** - tests/modules/ssuissero/** diff --git a/tests/modules/srst2/srst2/main.nf b/tests/modules/srst2/srst2/main.nf new file mode 100644 index 00000000..235f3ff9 --- /dev/null +++ b/tests/modules/srst2/srst2/main.nf @@ -0,0 +1,28 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { SRST2_SRST2 } from '../../../../modules/srst2/srst2/main.nf' + +workflow test_srst2_srst2_paired_end { + + input = [ + [ id:'test', single_end:false, db:"gene"], // meta map + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/resFinder_20180221_srst2.fasta') // Change to params.test_data syntax after the data is included in tests/config/test_data.config + ] + + SRST2_SRST2(input) +} + +workflow test_srst2_srst2_single_end { + + input = [ + [ id:'test', single_end:true, db:"gene" ], // meta map + file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/resFinder_20180221_srst2.fasta') // Change to params.test_data syntax after the data is included in tests/config/test_data.config + ] + + SRST2_SRST2(input) +} \ No newline at end of file diff --git 
a/tests/modules/srst2/srst2/nextflow.config b/tests/modules/srst2/srst2/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/srst2/srst2/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml new file mode 100644 index 00000000..7afd0687 --- /dev/null +++ b/tests/modules/srst2/srst2/test.yml @@ -0,0 +1,29 @@ +- name: srst2 srst2 test_srst2_srst2_paired_end + command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_paired_end -c tests/config/nextflow.config + tags: + - srst2/srst2 + - srst2 + files: + - path: /tmp/tmp03v2z3fu/srst2/test__genes__resFinder_20180221_srst2__results.txt + md5sum: 099aa6cacec5524b311f606debdfb3a9 + - path: /tmp/tmp03v2z3fu/srst2/test__test1.resFinder_20180221_srst2.pileup + md5sum: 64b512ff495b828c456405ec7b676ad1 + - path: /tmp/tmp03v2z3fu/srst2/test__test1.resFinder_20180221_srst2.sorted.bam + - path: /tmp/tmp03v2z3fu/srst2/versions.yml + md5sum: b446a70f1a2b4f60757829bcd744a214 + +- name: srst2 srst2 test_srst2_srst2_single_end + command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_single_end -c tests/config/nextflow.config + tags: + - srst2/srst2 + - srst2 + files: + - path: /tmp/tmp3kabyta4/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt + md5sum: d0762ef8c38afd0e0a34cce52ed1a3db + - path: /tmp/tmp3kabyta4/srst2/test__genes__resFinder_20180221_srst2__results.txt + md5sum: b8850c6644406d8b131e471ecc3f9013 + - path: /tmp/tmp3kabyta4/srst2/test__test1_1.resFinder_20180221_srst2.pileup + md5sum: 5f6279dc8124aa762a9dfe3d7a871277 + - path: /tmp/tmp3kabyta4/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam + - path: /tmp/tmp3kabyta4/srst2/versions.yml + md5sum: 790fe00493c6634d17801a930073218b From a575b91254480f964aa4e105f8fab09d0ecd8451 Mon Sep 17 00:00:00 2001 From: "Jill V. 
Hagey, PhD" Date: Wed, 27 Apr 2022 19:54:33 -0400 Subject: [PATCH 180/283] Update test.yml --- tests/modules/srst2/srst2/test.yml | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 7afd0687..f7d058de 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -1,29 +1,29 @@ - name: srst2 srst2 test_srst2_srst2_paired_end command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_paired_end -c tests/config/nextflow.config tags: - - srst2/srst2 - srst2 + - srst2/srst2 files: - - path: /tmp/tmp03v2z3fu/srst2/test__genes__resFinder_20180221_srst2__results.txt + - path: /tmp/tmpsqtj3g4l/srst2/test__genes__resFinder_20180221_srst2__results.txt md5sum: 099aa6cacec5524b311f606debdfb3a9 - - path: /tmp/tmp03v2z3fu/srst2/test__test1.resFinder_20180221_srst2.pileup + - path: /tmp/tmpsqtj3g4l/srst2/test__test1.resFinder_20180221_srst2.pileup md5sum: 64b512ff495b828c456405ec7b676ad1 - - path: /tmp/tmp03v2z3fu/srst2/test__test1.resFinder_20180221_srst2.sorted.bam - - path: /tmp/tmp03v2z3fu/srst2/versions.yml + - path: /tmp/tmpsqtj3g4l/srst2/test__test1.resFinder_20180221_srst2.sorted.bam + - path: /tmp/tmpsqtj3g4l/srst2/versions.yml md5sum: b446a70f1a2b4f60757829bcd744a214 - name: srst2 srst2 test_srst2_srst2_single_end command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_single_end -c tests/config/nextflow.config tags: - - srst2/srst2 - srst2 + - srst2/srst2 files: - - path: /tmp/tmp3kabyta4/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt + - path: /tmp/tmpo9au1v02/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt md5sum: d0762ef8c38afd0e0a34cce52ed1a3db - - path: /tmp/tmp3kabyta4/srst2/test__genes__resFinder_20180221_srst2__results.txt + - path: /tmp/tmpo9au1v02/srst2/test__genes__resFinder_20180221_srst2__results.txt md5sum: b8850c6644406d8b131e471ecc3f9013 - - path: /tmp/tmp3kabyta4/srst2/test__test1_1.resFinder_20180221_srst2.pileup + - path: /tmp/tmpo9au1v02/srst2/test__test1_1.resFinder_20180221_srst2.pileup md5sum: 5f6279dc8124aa762a9dfe3d7a871277 - - path: /tmp/tmp3kabyta4/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam - - path: /tmp/tmp3kabyta4/srst2/versions.yml + - path: /tmp/tmpo9au1v02/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam + - path: /tmp/tmpo9au1v02/srst2/versions.yml md5sum: 790fe00493c6634d17801a930073218b From ecd31fd3cdc1646bb80e5b04a52cfc3e4666b18b Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Wed, 27 Apr 2022 19:55:42 -0400 Subject: [PATCH 181/283] remove whitespaces and prettier fix --- modules/srst2/srst2/meta.yml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml index 2c44f53d..31cf4de0 100644 --- a/modules/srst2/srst2/meta.yml +++ b/modules/srst2/srst2/meta.yml @@ -1,7 +1,7 @@ name: srst2_srst2 description: | - Short Read Sequence Typing for Bacterial Pathogens is a program designed to take Illumina sequence data, - a MLST database and/or a database of gene sequences (e.g. resistance genes, virulence genes, etc) + Short Read Sequence Typing for Bacterial Pathogens is a program designed to take Illumina sequence data, + a MLST database and/or a database of gene sequences (e.g. resistance genes, virulence genes, etc) and report the presence of STs and/or reference genes. 
keywords: - mlst @@ -10,11 +10,11 @@ keywords: tools: - srst2: description: "Short Read Sequence Typing for Bacterial Pathogens" - homepage: {http://katholt.github.io/srst2/} - documentation: {https://github.com/katholt/srst2/blob/master/README.md} - tool_dev_url: {https://github.com/katholt/srst2} + homepage: { http://katholt.github.io/srst2/ } + documentation: { https://github.com/katholt/srst2/blob/master/README.md } + tool_dev_url: { https://github.com/katholt/srst2 } doi: "" - licence: ['BSD'] + licence: ["BSD"] input: - meta: @@ -49,11 +49,11 @@ output: pattern: "versions.yml" - txt: type: file - description: results text file + description: results text file pattern: "*_fullgenes_*_results.txt" - txt: type: file - description: results text file + description: results text file pattern: "*_genes_*_results.txt" - bam: type: file From b0c5f9422bf4f48a6799b42e9302fe71c0e9bf0f Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Wed, 27 Apr 2022 20:21:41 -0400 Subject: [PATCH 182/283] tmp/ to changed to output/ --- tests/modules/srst2/srst2/test.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index f7d058de..f011b4d1 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -4,12 +4,12 @@ - srst2 - srst2/srst2 files: - - path: /tmp/tmpsqtj3g4l/srst2/test__genes__resFinder_20180221_srst2__results.txt + - path: output/srst2/test__genes__resFinder_20180221_srst2__results.txt md5sum: 099aa6cacec5524b311f606debdfb3a9 - - path: /tmp/tmpsqtj3g4l/srst2/test__test1.resFinder_20180221_srst2.pileup + - path: output/srst2/test__test1.resFinder_20180221_srst2.pileup md5sum: 64b512ff495b828c456405ec7b676ad1 - - path: /tmp/tmpsqtj3g4l/srst2/test__test1.resFinder_20180221_srst2.sorted.bam - - path: /tmp/tmpsqtj3g4l/srst2/versions.yml + - path: output/srst2/test__test1.resFinder_20180221_srst2.sorted.bam + - path: output/srst2/versions.yml md5sum: b446a70f1a2b4f60757829bcd744a214 - name: srst2 srst2 test_srst2_srst2_single_end @@ -18,12 +18,12 @@ - srst2 - srst2/srst2 files: - - path: /tmp/tmpo9au1v02/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt + - path: output/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt md5sum: d0762ef8c38afd0e0a34cce52ed1a3db - - path: /tmp/tmpo9au1v02/srst2/test__genes__resFinder_20180221_srst2__results.txt + - path: output/srst2/test__genes__resFinder_20180221_srst2__results.txt md5sum: b8850c6644406d8b131e471ecc3f9013 - - path: /tmp/tmpo9au1v02/srst2/test__test1_1.resFinder_20180221_srst2.pileup + - path: output/srst2/test__test1_1.resFinder_20180221_srst2.pileup md5sum: 5f6279dc8124aa762a9dfe3d7a871277 - - path: /tmp/tmpo9au1v02/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam - - path: /tmp/tmpo9au1v02/srst2/versions.yml + - path: output/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam + - path: output/srst2/versions.yml md5sum: 790fe00493c6634d17801a930073218b From 85c49a971f91208ead572212be8475f5e7518feb Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 09:07:52 +0200 Subject: [PATCH 183/283] Fixed an issue with wrong file order --- modules/vardictjava/main.nf | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 8170bf76..855784db 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -8,9 +8,8 @@ process VARDICTJAVA { 
'quay.io/biocontainers/vardict-java:1.8.3--hdfd78af_0' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(regions_of_interest) path(reference_fasta) - path(regions_of_interest) output: tuple val(meta), path("*.vcf"), emit: vcf @@ -24,6 +23,9 @@ process VARDICTJAVA { def prefix = task.ext.prefix ?: "${meta.id}" """ + head -n 20 $reference_fasta + cat $reference_fasta | wc -l + vardict-java \\ $args \\ -b $bam \\ @@ -31,10 +33,11 @@ process VARDICTJAVA { -n $prefix \\ -G $reference_fasta \\ $regions_of_interest \\ + > ${prefix}.vcf cat <<-END_VERSIONS > versions.yml "${task.process}": - vardictjava: \$(echo 1.8.3) + vardict-java: \$(echo 1.8.3) END_VERSIONS """ } From db43095b1a1530af1e3cac7f3e7211c52d87bbd7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 09:18:15 +0200 Subject: [PATCH 184/283] Split the bed file into a seperate input --- modules/vardictjava/main.nf | 8 +++----- tests/modules/vardictjava/main.nf | 14 +++++++------- 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 855784db..e1098b22 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -8,7 +8,8 @@ process VARDICTJAVA { 'quay.io/biocontainers/vardict-java:1.8.3--hdfd78af_0' }" input: - tuple val(meta), path(bam), path(regions_of_interest) + tuple val(meta), path(bam) + path(regions_of_interest) path(reference_fasta) output: @@ -23,14 +24,11 @@ process VARDICTJAVA { def prefix = task.ext.prefix ?: "${meta.id}" """ - head -n 20 $reference_fasta - cat $reference_fasta | wc -l - vardict-java \\ $args \\ -b $bam \\ -th $task.cpus \\ - -n $prefix \\ + -N $prefix \\ -G $reference_fasta \\ $regions_of_interest \\ > ${prefix}.vcf diff --git a/tests/modules/vardictjava/main.nf b/tests/modules/vardictjava/main.nf index 7511b529..647c8bba 100644 --- a/tests/modules/vardictjava/main.nf +++ b/tests/modules/vardictjava/main.nf @@ -6,14 +6,14 @@ include { VARDICTJAVA } from '../../../modules/vardictjava/main.nf' workflow test_vardictjava { - input = [ + bam_input_ch = Channel.of([ [ id:'test', single_end:false ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) - ] + ]) - VARDICTJAVA ( - input, - file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true), - file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) - ) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + + VARDICTJAVA ( bam_input_ch, bed, fasta ) } From def580ddb7af11619f8f0757ef5af5ed156ed6a4 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 09:28:57 +0200 Subject: [PATCH 185/283] Add gzip for compressed output --- modules/vardictjava/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index e1098b22..425c1706 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -31,7 +31,7 @@ process VARDICTJAVA { -N $prefix \\ -G $reference_fasta \\ $regions_of_interest \\ - > ${prefix}.vcf + | gzip -c > ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml "${task.process}": From 484c8f5dfa40a2117997afcd8434bcd52ff4fe4e Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: 
Thu, 28 Apr 2022 09:29:27 +0200 Subject: [PATCH 186/283] Update meta.yml --- modules/vardictjava/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 7820a877..5bc21b55 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -53,7 +53,7 @@ output: - vcf: type: file description: VCF file output - pattern: "*.vcf" + pattern: "*.vcf.gz" authors: - "@nvnieuwk" From 2b29ff5883278856200f003328ece8d4897548bc Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:03:02 +0200 Subject: [PATCH 187/283] drop params.options --- subworkflows/nf-core/bam_qc_picard/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 31edb2c3..b8a92a21 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -4,9 +4,9 @@ params.options = [:] -include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../modules/picardcollectmultiplemetrics/main' addParams( options: params.options ) -include { PICARD_COLLECTWGSMETRICS } from '../../../modules/picardcollectwgsmetrics/main' addParams( options: params.options ) -include { PICARD_COLLECTHSMETRICS } from '../../../modules/picardcollecthsmetrics/main' addParams( options: params.options ) +include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../modules/picardcollectmultiplemetrics/main' +include { PICARD_COLLECTWGSMETRICS } from '../../../modules/picardcollectwgsmetrics/main' +include { PICARD_COLLECTHSMETRICS } from '../../../modules/picardcollecthsmetrics/main' workflow BAM_QC_PICARD { take: From 8fb8199f3115c1a7f136202882b39be403ac5cd5 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:05:07 +0200 Subject: [PATCH 188/283] fix copilot suggestion --- subworkflows/nf-core/bam_qc_picard/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index b8a92a21..e4a7e5f0 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -22,10 +22,10 @@ workflow BAM_QC_PICARD { ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) if (!ch_bait_interval.isEmpty() || !ch_target_interval.isEmpty()) { if (ch_bait_interval.isEmpty()) { - throw new Error("Bait interval channel is empty") + log.error("Bait interval channel is empty") } if (ch_target_interval.isEmpty()) { - throw new Error("Target interval channel is empty") + log.error("Target interval channel is empty") } PICARD_COLLECTHSMETRICS( ch_bam_bai, ch_fasta, ch_bait_interval, ch_target_interval ) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) From 433aaece46967b56ba6a4c937ce1c8b0706b260e Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:05:31 +0200 Subject: [PATCH 189/283] Update tests/subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. 
Garcia --- tests/subworkflows/nf-core/bam_qc_picard/main.nf | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/subworkflows/nf-core/bam_qc_picard/main.nf b/tests/subworkflows/nf-core/bam_qc_picard/main.nf index d88f2bf9..9112416b 100644 --- a/tests/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/tests/subworkflows/nf-core/bam_qc_picard/main.nf @@ -16,6 +16,8 @@ workflow test_bam_qc_picard_targetted { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + bait = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) + target = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) - BAM_QC_PICARD ( input, [], file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true), file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) ) + BAM_QC_PICARD ( input, [], bait, target ) } From 3cbf5c63e5677491377f23e6dda20443804a8158 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:09:25 +0200 Subject: [PATCH 190/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index e4a7e5f0..851268c6 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -20,7 +20,7 @@ workflow BAM_QC_PICARD { PICARD_COLLECTMULTIPLEMETRICS( ch_bam_bai, ch_fasta] ) ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) - if (!ch_bait_interval.isEmpty() || !ch_target_interval.isEmpty()) { + if (ch_bait_interval || ch_target_interval) { if (ch_bait_interval.isEmpty()) { log.error("Bait interval channel is empty") } From 881e9db4bfde569c8e9c0d51a1b3916817610fc0 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:19:30 +0200 Subject: [PATCH 191/283] update tests --- subworkflows/nf-core/bam_qc_picard/meta.yml | 2 +- tests/subworkflows/nf-core/bam_qc_picard/main.nf | 12 +++++++----- tests/subworkflows/nf-core/bam_qc_picard/test.yml | 2 +- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/meta.yml b/subworkflows/nf-core/bam_qc_picard/meta.yml index 77104e82..c4422150 100644 --- a/subworkflows/nf-core/bam_qc_picard/meta.yml +++ b/subworkflows/nf-core/bam_qc_picard/meta.yml @@ -47,7 +47,7 @@ output: - hs_metrics: type: file description: Alignment metrics files generated by picard CollectHsMetrics - pattern: "*_collecthsmetrics.txt" + pattern: "*_metrics.txt" - wgs_metrics: type: file description: Alignment metrics files generated by picard CollectWgsMetrics diff --git a/tests/subworkflows/nf-core/bam_qc_picard/main.nf b/tests/subworkflows/nf-core/bam_qc_picard/main.nf index 9112416b..03696b44 100644 --- a/tests/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/tests/subworkflows/nf-core/bam_qc_picard/main.nf @@ -6,18 +6,20 @@ include { BAM_QC_PICARD } from '../../../../subworkflows/nf-core/bam_qc_picard/m workflow test_bam_qc_picard_wgs { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true) + 
file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - BAM_QC_PICARD ( input, [], [], [] ) + BAM_QC_PICARD ( input, fasta, [], [] ) } workflow test_bam_qc_picard_targetted { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) bait = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) - target = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) + target = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) - BAM_QC_PICARD ( input, [], bait, target ) + BAM_QC_PICARD ( input, fasta, bait, target ) } diff --git a/tests/subworkflows/nf-core/bam_qc_picard/test.yml b/tests/subworkflows/nf-core/bam_qc_picard/test.yml index af98230d..7b0a6c2b 100644 --- a/tests/subworkflows/nf-core/bam_qc_picard/test.yml +++ b/tests/subworkflows/nf-core/bam_qc_picard/test.yml @@ -30,4 +30,4 @@ - path: ./output/picard/test.CollectMultipleMetrics.base_distribution_by_cycle_metrics - path: ./output/picard/test.CollectMultipleMetrics.quality_by_cycle_metrics - path: ./output/picard/test.CollectMultipleMetrics.quality_distribution_metrics - - path: ./output/picard/test_collecthsmetrics.txt + - path: ./output/picard/test.CollectHsMetrics.coverage_metrics From b92efb7abfc77cb8d9b4149c5fb866409122ce05 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 10:21:47 +0200 Subject: [PATCH 192/283] add nextflow.config --- tests/subworkflows/nf-core/bam_qc_picard/nextflow.config | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 tests/subworkflows/nf-core/bam_qc_picard/nextflow.config diff --git a/tests/subworkflows/nf-core/bam_qc_picard/nextflow.config b/tests/subworkflows/nf-core/bam_qc_picard/nextflow.config new file mode 100644 index 00000000..8730f1c4 --- /dev/null +++ b/tests/subworkflows/nf-core/bam_qc_picard/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} From 9df830f79dc70a347578120e797a4e3160dfc66d Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 10:23:07 +0200 Subject: [PATCH 193/283] Added the .bai and .fai files --- modules/vardictjava/main.nf | 8 ++++---- tests/modules/vardictjava/main.nf | 12 ++++++++---- tests/modules/vardictjava/test.yml | 15 ++++++--------- 3 files changed, 18 insertions(+), 17 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 425c1706..c370b6ef 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -8,12 +8,12 @@ process VARDICTJAVA { 'quay.io/biocontainers/vardict-java:1.8.3--hdfd78af_0' }" input: - tuple val(meta), path(bam) + tuple val(meta), path(bam), path(bai) path(regions_of_interest) - path(reference_fasta) + tuple path(reference_fasta), path(reference_fai) output: - tuple val(meta), path("*.vcf"), emit: vcf + tuple val(meta), path("*.vcf.gz"), emit: vcf path "versions.yml" , emit: versions when: @@ -30,7 +30,7 @@ process VARDICTJAVA { -th $task.cpus \\ -N $prefix \\ -G $reference_fasta 
\\ - $regions_of_interest \\ + $regions_of_interest | gzip -c > ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml diff --git a/tests/modules/vardictjava/main.nf b/tests/modules/vardictjava/main.nf index 647c8bba..89eec407 100644 --- a/tests/modules/vardictjava/main.nf +++ b/tests/modules/vardictjava/main.nf @@ -8,12 +8,16 @@ workflow test_vardictjava { bam_input_ch = Channel.of([ [ id:'test', single_end:false ], // meta map - file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), + file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]) - bed = file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true) + bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) - fasta = file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true) + reference = Channel.of([ + file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true), + file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) + ]) - VARDICTJAVA ( bam_input_ch, bed, fasta ) + VARDICTJAVA ( bam_input_ch, bed, reference ) } diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 001a56e8..77aa9047 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -1,12 +1,9 @@ -## TODO nf-core: Please run the following command to build this file: -# nf-core modules create-test-yml vardictjava -- name: "vardictjava" - command: nextflow run ./tests/modules/vardictjava -entry test_vardictjava -c ./tests/config/nextflow.config -c ./tests/modules/vardictjava/nextflow.config +- name: vardictjava test_vardictjava + command: nextflow run tests/modules/vardictjava -entry test_vardictjava -c tests/config/nextflow.config tags: - - "vardictjava" - # + - vardictjava files: - - path: "output/vardictjava/test.vcf" - md5sum: e667c7caad0bc4b7ac383fd023c654fc + - path: output/vardictjava/test.vcf.gz + md5sum: 2179dcaee6183495b421293f42db11b5 - path: output/vardictjava/versions.yml - md5sum: a01fe51bc4c6a3a6226fbf77b2c7cf3b + md5sum: aac455b8c9c9194c5fed80e4fd495b96 From dee8ec9e911ec848e93c8ed0b9667e7c674e9132 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 12:53:17 +0200 Subject: [PATCH 194/283] fix typos --- subworkflows/nf-core/bam_qc_picard/main.nf | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 851268c6..747d5995 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -2,11 +2,9 @@ // Run QC steps on BAM/CRAM files using Picard // -params.options = [:] - -include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../modules/picardcollectmultiplemetrics/main' -include { PICARD_COLLECTWGSMETRICS } from '../../../modules/picardcollectwgsmetrics/main' -include { PICARD_COLLECTHSMETRICS } from '../../../modules/picardcollecthsmetrics/main' +include { PICARD_COLLECTMULTIPLEMETRICS } from '../../../modules/picard/collectmultiplemetrics/main' +include { PICARD_COLLECTWGSMETRICS } from '../../../modules/picard/collectwgsmetrics/main' +include { PICARD_COLLECTHSMETRICS } from '../../../modules/picard/collecthsmetrics/main' 
workflow BAM_QC_PICARD { take: @@ -18,7 +16,7 @@ workflow BAM_QC_PICARD { main: ch_versions = Channel.empty() - PICARD_COLLECTMULTIPLEMETRICS( ch_bam_bai, ch_fasta] ) + PICARD_COLLECTMULTIPLEMETRICS( ch_bam_bai, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) if (ch_bait_interval || ch_target_interval) { if (ch_bait_interval.isEmpty()) { From 45b4a61a15c8b7ac6c29a447ccc1ece9f08f8efe Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 13:31:37 +0200 Subject: [PATCH 195/283] Some small updates --- modules/vardictjava/main.nf | 2 +- tests/modules/vardictjava/main.nf | 8 ++++---- tests/modules/vardictjava/test.yml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index c370b6ef..26833436 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -30,7 +30,7 @@ process VARDICTJAVA { -th $task.cpus \\ -N $prefix \\ -G $reference_fasta \\ - $regions_of_interest + $regions_of_interest \\ | gzip -c > ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml diff --git a/tests/modules/vardictjava/main.nf b/tests/modules/vardictjava/main.nf index 89eec407..d392358a 100644 --- a/tests/modules/vardictjava/main.nf +++ b/tests/modules/vardictjava/main.nf @@ -6,15 +6,15 @@ include { VARDICTJAVA } from '../../../modules/vardictjava/main.nf' workflow test_vardictjava { - bam_input_ch = Channel.of([ - [ id:'test', single_end:false ], // meta map + bam_input_ch = Channel.value([ + [ id:'test' ], // meta map file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), file(params.test_data['homo_sapiens']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) ]) - bed = file(params.test_data['homo_sapiens']['genome']['genome_bed_gz'], checkIfExists: true) + bed = Channel.value(file(params.test_data['homo_sapiens']['genome']['genome_bed'], checkIfExists: true)) - reference = Channel.of([ + reference = Channel.value([ file(params.test_data['homo_sapiens']['genome']['genome_fasta'], checkIfExists: true), file(params.test_data['homo_sapiens']['genome']['genome_fasta_fai'], checkIfExists: true) ]) diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 77aa9047..0bd21f4e 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -4,6 +4,6 @@ - vardictjava files: - path: output/vardictjava/test.vcf.gz - md5sum: 2179dcaee6183495b421293f42db11b5 + md5sum: 7029066c27ac6f5ef18d660d5741979a - path: output/vardictjava/versions.yml md5sum: aac455b8c9c9194c5fed80e4fd495b96 From 68f1c27169946f931ea4318911de5681f88b2961 Mon Sep 17 00:00:00 2001 From: Praveen Raj S <43108054+praveenraj2018@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:41:20 +0200 Subject: [PATCH 196/283] Changed tbi as optional output in GATK4 HaplotypeCaller (#1576) * Changed tbi as optional output. HC cannot index a VCF from large chromosomes. * Apply suggestions from code review * Update modules/gatk4/haplotypecaller/main.nf Co-authored-by: Maxime U. 
Garcia --- modules/gatk4/haplotypecaller/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gatk4/haplotypecaller/main.nf b/modules/gatk4/haplotypecaller/main.nf index 57f69ecd..2cd9e7d4 100644 --- a/modules/gatk4/haplotypecaller/main.nf +++ b/modules/gatk4/haplotypecaller/main.nf @@ -17,7 +17,7 @@ process GATK4_HAPLOTYPECALLER { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - tuple val(meta), path("*.tbi") , emit: tbi + tuple val(meta), path("*.tbi") , optional:true, emit: tbi path "versions.yml" , emit: versions when: From e0a8af869324765ec0c6a375703bfa68f5ba4e77 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:42:14 +0200 Subject: [PATCH 197/283] fix outputs --- subworkflows/nf-core/bam_qc_picard/main.nf | 14 ++++++++------ subworkflows/nf-core/bam_qc_picard/meta.yml | 8 ++------ 2 files changed, 10 insertions(+), 12 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 747d5995..13f4a845 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -8,7 +8,7 @@ include { PICARD_COLLECTHSMETRICS } from '../../../modules/picard/collecth workflow BAM_QC_PICARD { take: - ch_bam_bai // channel: [ val(meta), [ bam ], [bai/csi] ] + ch_bam // channel: [ val(meta), [ bam ]] ch_fasta // channel: [ fasta ] ch_bait_interval // channel: [ bait_interval ] ch_target_interval // channel: [ target_interval ] @@ -16,7 +16,7 @@ workflow BAM_QC_PICARD { main: ch_versions = Channel.empty() - PICARD_COLLECTMULTIPLEMETRICS( ch_bam_bai, ch_fasta ) + PICARD_COLLECTMULTIPLEMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) if (ch_bait_interval || ch_target_interval) { if (ch_bait_interval.isEmpty()) { @@ -25,16 +25,18 @@ workflow BAM_QC_PICARD { if (ch_target_interval.isEmpty()) { log.error("Target interval channel is empty") } - PICARD_COLLECTHSMETRICS( ch_bam_bai, ch_fasta, ch_bait_interval, ch_target_interval ) + PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_bait_interval, ch_target_interval ) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { - PICARD_COLLECTWGSMETRICS( ch_bam_bai, ch_fasta ) + PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first()) } + ch_coverage_metrics = Channel.empty() + ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.coverage_metrics.first(), PICARD_COLLECTWGSMETRICS.out.coverage_metrics.first()) + emit: - hs_metrics = PICARD_COLLECTHSMETRICS.out.hs_metrics // channel: [ val(meta), [ hs_metrics ] ] - wgs_metrics = PICARD_COLLECTWGSMETRICS.out.metrics // channel: [ val(meta), [ wgs_metrics ] ] + coverage_metrics = PICARD_COLLECTWGSMETRICS.out.metrics // channel: [ val(meta), [ coverage_metrics ] ] multiple_metrics = PICARD_COLLECTMULTIPLEMETRICS.out.metrics // channel: [ val(meta), [ multiple_metrics ] ] versions = ch_versions // channel: [ versions.yml ] diff --git a/subworkflows/nf-core/bam_qc_picard/meta.yml b/subworkflows/nf-core/bam_qc_picard/meta.yml index c4422150..67d3496e 100644 --- a/subworkflows/nf-core/bam_qc_picard/meta.yml +++ b/subworkflows/nf-core/bam_qc_picard/meta.yml @@ -44,14 +44,10 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - hs_metrics: + - coverage_metrics: type: file - description: Alignment metrics files generated by picard CollectHsMetrics + description: Alignment metrics files generated by picard CollectHsMetrics or CollectWgsMetrics pattern: "*_metrics.txt" - - wgs_metrics: - type: file - description: Alignment metrics files generated by picard CollectWgsMetrics - pattern: "*_{metrics}" - multiple_metrics: type: file description: Alignment metrics files generated by picard CollectMultipleMetrics From 55dfe1d8ab35873790c13ee4eb75af818fef80b2 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:43:59 +0200 Subject: [PATCH 198/283] fix hsmetrics input --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 13f4a845..9e429448 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -25,7 +25,7 @@ workflow BAM_QC_PICARD { if (ch_target_interval.isEmpty()) { log.error("Target interval channel is empty") } - PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_bait_interval, ch_target_interval ) + PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, [], ch_bait_interval, ch_target_interval ) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) From 8ad861a645e86c411b47472616a924063afba106 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:52:03 +0200 Subject: [PATCH 199/283] add fasta index --- subworkflows/nf-core/bam_qc_picard/main.nf | 12 +++++++----- subworkflows/nf-core/bam_qc_picard/meta.yml | 10 +++++----- tests/subworkflows/nf-core/bam_qc_picard/main.nf | 12 +++++++----- 3 files changed, 19 insertions(+), 15 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 9e429448..e6f82feb 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -10,14 +10,17 @@ workflow BAM_QC_PICARD { take: ch_bam // channel: [ val(meta), [ bam ]] ch_fasta // channel: [ fasta ] + ch_fasta_faix // channel: [ fasta_fai ] ch_bait_interval // channel: [ bait_interval ] ch_target_interval // channel: [ target_interval ] main: ch_versions = Channel.empty() + ch_coverage_metrics = Channel.empty() PICARD_COLLECTMULTIPLEMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) + if (ch_bait_interval || ch_target_interval) { if (ch_bait_interval.isEmpty()) { log.error("Bait interval channel is empty") @@ -26,18 +29,17 @@ workflow BAM_QC_PICARD { log.error("Target interval channel is empty") } PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, [], ch_bait_interval, ch_target_interval ) + ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.coverage_metrics.first()) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first()) + ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.coverage_metrics.first()) } - ch_coverage_metrics = Channel.empty() - ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.coverage_metrics.first(), PICARD_COLLECTWGSMETRICS.out.coverage_metrics.first()) - emit: - coverage_metrics = 
PICARD_COLLECTWGSMETRICS.out.metrics // channel: [ val(meta), [ coverage_metrics ] ] + coverage_metrics = ch_coverage_metrics // channel: [ val(meta), [ coverage_metrics ] ] multiple_metrics = PICARD_COLLECTMULTIPLEMETRICS.out.metrics // channel: [ val(meta), [ multiple_metrics ] ] - versions = ch_versions // channel: [ versions.yml ] + versions = ch_versions // channel: [ versions.yml ] } diff --git a/subworkflows/nf-core/bam_qc_picard/meta.yml b/subworkflows/nf-core/bam_qc_picard/meta.yml index 67d3496e..c45215d1 100644 --- a/subworkflows/nf-core/bam_qc_picard/meta.yml +++ b/subworkflows/nf-core/bam_qc_picard/meta.yml @@ -22,14 +22,14 @@ input: type: file description: BAM/CRAM/SAM file pattern: "*.{bam,cram,sam}" - - bai: - type: file - description: Index for BAM/CRAM/SAM file - pattern: "*.{bai,crai,sai}" - fasta: type: optional file - description: Reference file the CRAM was created with + description: Reference fasta file pattern: "*.{fasta,fa}" + - fasta_fai: + type: optional file + description: Reference fasta file index + pattern: "*.{fasta,fa}.fai" - bait_intervals: type: optional file description: An interval list file that contains the locations of the baits used. diff --git a/tests/subworkflows/nf-core/bam_qc_picard/main.nf b/tests/subworkflows/nf-core/bam_qc_picard/main.nf index 03696b44..a3e2ed62 100644 --- a/tests/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/tests/subworkflows/nf-core/bam_qc_picard/main.nf @@ -9,17 +9,19 @@ workflow test_bam_qc_picard_wgs { file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fasta_fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) - BAM_QC_PICARD ( input, fasta, [], [] ) + BAM_QC_PICARD ( input, fasta, fasta_fai, [], [] ) } workflow test_bam_qc_picard_targetted { input = [ [ id:'test', single_end:false ], // meta map file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true) ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - bait = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) - target = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + fasta_fai = file(params.test_data['sarscov2']['genome']['genome_fasta_fai'], checkIfExists: true) + bait = file(params.test_data['sarscov2']['genome']['baits_interval_list'], checkIfExists: true) + target = file(params.test_data['sarscov2']['genome']['targets_interval_list'], checkIfExists: true) - BAM_QC_PICARD ( input, fasta, bait, target ) + BAM_QC_PICARD ( input, fasta, fasta_fai, bait, target ) } From 2fc857955f42c3aed840f9a61062b481a159ff4a Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:53:48 +0200 Subject: [PATCH 200/283] fix index omission --- subworkflows/nf-core/bam_qc_picard/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index e6f82feb..341a4310 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -10,7 +10,7 @@ workflow BAM_QC_PICARD { take: ch_bam // channel: [ val(meta), [ bam ]] ch_fasta // channel: [ fasta ] - 
ch_fasta_faix // channel: [ fasta_fai ] + ch_fasta_fai // channel: [ fasta_fai ] ch_bait_interval // channel: [ bait_interval ] ch_target_interval // channel: [ target_interval ] @@ -28,7 +28,7 @@ workflow BAM_QC_PICARD { if (ch_target_interval.isEmpty()) { log.error("Target interval channel is empty") } - PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, [], ch_bait_interval, ch_target_interval ) + PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_fasta_fai, ch_bait_interval, ch_target_interval ) ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.coverage_metrics.first()) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { From 4618c542e9d64cd71176cfd24fe0edc908eb3d10 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Thu, 28 Apr 2022 13:59:46 +0200 Subject: [PATCH 201/283] fix metrics output --- subworkflows/nf-core/bam_qc_picard/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 341a4310..b8be04b2 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -29,12 +29,12 @@ workflow BAM_QC_PICARD { log.error("Target interval channel is empty") } PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_fasta_fai, ch_bait_interval, ch_target_interval ) - ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.coverage_metrics.first()) + ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.metrics.first()) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first()) - ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.coverage_metrics.first()) + ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.metrics.first()) } emit: From 57cb730e78634673fb254a77606e014ce942734c Mon Sep 17 00:00:00 2001 From: Daniel Cooke Date: Thu, 28 Apr 2022 13:06:38 +0100 Subject: [PATCH 202/283] Fix tiddit/sv (#1580) * Fix fasta ref option for TIDDIT_SV * Add md5sum's to tiddit/sv tests Co-authored-by: Daniel Cooke --- modules/tiddit/sv/main.nf | 2 +- tests/modules/tiddit/sv/test.yml | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/modules/tiddit/sv/main.nf b/modules/tiddit/sv/main.nf index 1bf7146a..b3e3813c 100644 --- a/modules/tiddit/sv/main.nf +++ b/modules/tiddit/sv/main.nf @@ -24,7 +24,7 @@ process TIDDIT_SV { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def reference = fasta == "dummy_file.txt" ? "--ref $fasta" : "" + def reference = fasta ? 
"--ref $fasta" : "" """ tiddit \\ --sv \\ diff --git a/tests/modules/tiddit/sv/test.yml b/tests/modules/tiddit/sv/test.yml index 168d21c5..40ea5b4d 100644 --- a/tests/modules/tiddit/sv/test.yml +++ b/tests/modules/tiddit/sv/test.yml @@ -9,6 +9,7 @@ - path: output/tiddit/test.signals.tab md5sum: dab4b2fec4ddf8eb1c23005b0770150e - path: output/tiddit/test.vcf + md5sum: bdce14ae8292bf3deb81f6f255baf859 - name: tiddit sv no ref command: nextflow run ./tests/modules/tiddit/sv -entry test_tiddit_sv_no_ref -c ./tests/config/nextflow.config -c ./tests/modules/tiddit/sv/nextflow.config @@ -21,3 +22,4 @@ - path: output/tiddit/test.signals.tab md5sum: dab4b2fec4ddf8eb1c23005b0770150e - path: output/tiddit/test.vcf + md5sum: 3d0e83a8199b2bdb81cfe3e6b12bf64b From 8c98a78d2a0e12d081184736e17ccde3feee3f44 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 14:07:55 +0200 Subject: [PATCH 203/283] Fixed the issue with the BED file --- modules/vardictjava/main.nf | 1 + tests/modules/vardictjava/test.yml | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 26833436..d6b4f1dd 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -26,6 +26,7 @@ process VARDICTJAVA { """ vardict-java \\ $args \\ + -c 1 -S 2 -E 3 \\ -b $bam \\ -th $task.cpus \\ -N $prefix \\ diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 0bd21f4e..77aa9047 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -4,6 +4,6 @@ - vardictjava files: - path: output/vardictjava/test.vcf.gz - md5sum: 7029066c27ac6f5ef18d660d5741979a + md5sum: 2179dcaee6183495b421293f42db11b5 - path: output/vardictjava/versions.yml md5sum: aac455b8c9c9194c5fed80e4fd495b96 From cdefbec66999c0b49d8bfeea9d6f9d19056635a2 Mon Sep 17 00:00:00 2001 From: JIANHONG OU Date: Thu, 28 Apr 2022 08:16:26 -0400 Subject: [PATCH 204/283] add kimporttext module (#1560) * add kimporttext module * fix the Prettier error. * fix the Prettier error. * fix the test.yml * fix the test.yml * Update modules/krona/ktimporttext/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/krona/ktimporttext/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/krona/ktimporttext/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/krona/ktimporttext/meta.yml Co-authored-by: James A. Fellows Yates * Update modules/krona/ktimporttext/meta.yml Co-authored-by: James A. Fellows Yates * update the test files; simplify the main script of krona/ktimporttext. * update the test file url for krona ktimporttext Co-authored-by: James A. 
Fellows Yates --- modules/krona/ktimporttext/main.nf | 34 ++++++++++++++ modules/krona/ktimporttext/meta.yml | 47 +++++++++++++++++++ tests/config/pytest_modules.yml | 4 ++ tests/config/test_data.config | 3 ++ tests/modules/krona/ktimporttext/main.nf | 31 ++++++++++++ .../krona/ktimporttext/nextflow.config | 5 ++ tests/modules/krona/ktimporttext/test.yml | 19 ++++++++ 7 files changed, 143 insertions(+) create mode 100644 modules/krona/ktimporttext/main.nf create mode 100644 modules/krona/ktimporttext/meta.yml create mode 100644 tests/modules/krona/ktimporttext/main.nf create mode 100644 tests/modules/krona/ktimporttext/nextflow.config create mode 100644 tests/modules/krona/ktimporttext/test.yml diff --git a/modules/krona/ktimporttext/main.nf b/modules/krona/ktimporttext/main.nf new file mode 100644 index 00000000..de0cfc22 --- /dev/null +++ b/modules/krona/ktimporttext/main.nf @@ -0,0 +1,34 @@ +process KRONA_KTIMPORTTEXT { + tag "$meta.id" + label 'process_low' + + conda (params.enable_conda ? "bioconda::krona=2.8.1" : null) + container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? + 'https://depot.galaxyproject.org/singularity/krona:2.8.1--pl5321hdfd78af_1': + 'quay.io/biocontainers/krona:2.8.1--pl5321hdfd78af_1' }" + + input: + tuple val(meta), path(report) + + output: + tuple val(meta), path ('*.html'), emit: html + path "versions.yml" , emit: versions + + when: + task.ext.when == null || task.ext.when + + script: + def args = task.ext.args ?: '' + def prefix = task.ext.prefix ?: "${meta.id}" + """ + ktImportText \\ + $args \\ + -o ${prefix}.html \\ + $report + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + krona: \$( echo \$(ktImportText 2>&1) | sed 's/^.*KronaTools //g; s/- ktImportText.*\$//g') + END_VERSIONS + """ +} diff --git a/modules/krona/ktimporttext/meta.yml b/modules/krona/ktimporttext/meta.yml new file mode 100644 index 00000000..a7108e0d --- /dev/null +++ b/modules/krona/ktimporttext/meta.yml @@ -0,0 +1,47 @@ +name: "krona_ktimporttext" +description: Creates a Krona chart from text files listing quantities and lineages. +keywords: + - plot + - taxonomy + - interactive + - html + - visualisation + - krona chart + - metagenomics +tools: + - krona: + description: Krona Tools is a set of scripts to create Krona charts from several Bioinformatics tools as well as from text and XML files. + homepage: https://github.com/marbl/Krona/wiki/KronaTools + documentation: http://manpages.ubuntu.com/manpages/impish/man1/ktImportTaxonomy.1.html + tool_dev_url: https://github.com/marbl/Krona + doi: 10.1186/1471-2105-12-385 + licence: https://raw.githubusercontent.com/marbl/Krona/master/KronaTools/LICENSE.txt + +input: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. [ id:'test'] + - report: + type: file + description: "Tab-delimited text file. Each line should be a number followed by a list of wedges to contribute to (starting from the highest level). If no wedges are listed (and just a quantity is given), it will contribute to the top level. If the same lineage is listed more than once, the values will be added. Quantities can be omitted if -q is specified. Lines beginning with '#' will be ignored." + pattern: "*.{txt}" + +output: + - meta: + type: map + description: | + Groovy Map containing sample information + e.g. 
[ id:'test' ] + - versions: + type: file + description: File containing software versions + pattern: "versions.yml" + - html: + type: file + description: A html file containing an interactive krona plot. + pattern: "*.{html}" + +authors: + - "@jianhong" diff --git a/tests/config/pytest_modules.yml b/tests/config/pytest_modules.yml index 4d8ce0b5..dfcbfa8c 100644 --- a/tests/config/pytest_modules.yml +++ b/tests/config/pytest_modules.yml @@ -1050,6 +1050,10 @@ krona/ktimporttaxonomy: - modules/krona/ktimporttaxonomy/** - tests/modules/krona/ktimporttaxonomy/** +krona/ktimporttext: + - modules/krona/ktimporttext/** + - tests/modules/krona/ktimporttext/** + last/dotplot: - modules/last/dotplot/** - tests/modules/last/dotplot/** diff --git a/tests/config/test_data.config b/tests/config/test_data.config index b3171a51..5d5535c4 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -109,6 +109,9 @@ params { test_sequencing_summary = "${test_data_dir}/genomics/sarscov2/nanopore/sequencing_summary/test.sequencing_summary.txt" } + 'metagenome' { + kraken_report = "${test_data_dir}/genomics/sarscov2/metagenome/test_1.kraken2.report.txt" + } } 'homo_sapiens' { 'genome' { diff --git a/tests/modules/krona/ktimporttext/main.nf b/tests/modules/krona/ktimporttext/main.nf new file mode 100644 index 00000000..3d288b7b --- /dev/null +++ b/tests/modules/krona/ktimporttext/main.nf @@ -0,0 +1,31 @@ +#!/usr/bin/env nextflow + +nextflow.enable.dsl = 2 + +include { KRONA_KTIMPORTTEXT } from '../../../../modules/krona/ktimporttext/main.nf' + +workflow test_krona_ktimporttext_multi { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/krona/ktimporttext.txt', checkIfExists: true), // krona default test file + file(params.test_data['sarscov2']['metagenome']['kraken_report'], checkIfExists: true), //Kraken2 report file + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/krona/kaiju_out4krona.txt', checkIfExists: true) // Kaiju output 4 krona + ] + ] + + KRONA_KTIMPORTTEXT ( input ) +} + +workflow test_krona_ktimporttext_single { + + input = [ + [ id:'test', single_end:false ], // meta map + [ + file('http://krona.sourceforge.net/examples/text.txt', checkIfExists: true) // krona default test file + ] + ] + + KRONA_KTIMPORTTEXT ( input ) +} diff --git a/tests/modules/krona/ktimporttext/nextflow.config b/tests/modules/krona/ktimporttext/nextflow.config new file mode 100644 index 00000000..50f50a7a --- /dev/null +++ b/tests/modules/krona/ktimporttext/nextflow.config @@ -0,0 +1,5 @@ +process { + + publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } + +} \ No newline at end of file diff --git a/tests/modules/krona/ktimporttext/test.yml b/tests/modules/krona/ktimporttext/test.yml new file mode 100644 index 00000000..93ae12da --- /dev/null +++ b/tests/modules/krona/ktimporttext/test.yml @@ -0,0 +1,19 @@ +- name: krona ktimporttext test_krona_ktimporttext_multi + command: nextflow run tests/modules/krona/ktimporttext -entry test_krona_ktimporttext_multi -c tests/config/nextflow.config + tags: + - krona + - krona/ktimporttext + files: + - path: output/krona/test.html + contains: + - "DOCTYPE html PUBLIC" + +- name: krona ktimporttext test_krona_ktimporttext_single + command: nextflow run tests/modules/krona/ktimporttext -entry test_krona_ktimporttext_single -c tests/config/nextflow.config + tags: + - krona + - 
krona/ktimporttext + files: + - path: output/krona/test.html + contains: + - "DOCTYPE html PUBLIC" From bf186044716cb0621f31ccc4727ad4a0fb7c81b9 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 14:29:37 +0200 Subject: [PATCH 205/283] fix for a linting issue --- modules/vardictjava/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index d6b4f1dd..3e036ebd 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -14,7 +14,7 @@ process VARDICTJAVA { output: tuple val(meta), path("*.vcf.gz"), emit: vcf - path "versions.yml" , emit: versions + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when From 40e6e5273de785042fc1b4d6fc1d7510729a7789 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 14:30:04 +0200 Subject: [PATCH 206/283] fix for a linting issue --- modules/vardictjava/meta.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 5bc21b55..39368f81 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -15,7 +15,6 @@ tools: licence: "['MIT']" input: - # Only when we have meta - meta: type: map description: | @@ -38,7 +37,6 @@ input: pattern: "*.bed" output: - #Only when we have meta - meta: type: map description: | From b3e9b88e80880f450ad79a95b2b7aa05e1de5484 Mon Sep 17 00:00:00 2001 From: Praveen Raj S <43108054+praveenraj2018@users.noreply.github.com> Date: Thu, 28 Apr 2022 14:41:01 +0200 Subject: [PATCH 207/283] csi output in TABIX (#1579) * Added: csi output to TABIX * Added: tests for csi * Fix tiddit/sv (#1580) * Fix fasta ref option for TIDDIT_SV * Add md5sum's to tiddit/sv tests Co-authored-by: Daniel Cooke * Fix: prettier issues Co-authored-by: Daniel Cooke Co-authored-by: Daniel Cooke --- modules/tabix/tabix/main.nf | 3 ++- modules/tabix/tabix/meta.yml | 4 ++++ tests/modules/tabix/tabix/main.nf | 19 ++++++++++++++----- tests/modules/tabix/tabix/nextflow.config | 6 +++++- tests/modules/tabix/tabix/test.yml | 10 +++++++++- 5 files changed, 34 insertions(+), 8 deletions(-) diff --git a/modules/tabix/tabix/main.nf b/modules/tabix/tabix/main.nf index c9dab068..e155e468 100644 --- a/modules/tabix/tabix/main.nf +++ b/modules/tabix/tabix/main.nf @@ -11,7 +11,8 @@ process TABIX_TABIX { tuple val(meta), path(tab) output: - tuple val(meta), path("*.tbi"), emit: tbi + tuple val(meta), path("*.tbi"), optional:true, emit: tbi + tuple val(meta), path("*.csi"), optional:true, emit: csi path "versions.yml" , emit: versions when: diff --git a/modules/tabix/tabix/meta.yml b/modules/tabix/tabix/meta.yml index 89478abe..fcc6e524 100644 --- a/modules/tabix/tabix/meta.yml +++ b/modules/tabix/tabix/meta.yml @@ -31,6 +31,10 @@ output: type: file description: tabix index file pattern: "*.{tbi}" + - csi: + type: file + description: coordinate sorted index file + pattern: "*.{csi}" - versions: type: file description: File containing software versions diff --git a/tests/modules/tabix/tabix/main.nf b/tests/modules/tabix/tabix/main.nf index 993ee812..da26f4d7 100644 --- a/tests/modules/tabix/tabix/main.nf +++ b/tests/modules/tabix/tabix/main.nf @@ -2,9 +2,10 @@ nextflow.enable.dsl = 2 -include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' -include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' -include { TABIX_TABIX as TABIX_VCF } from '../../../../modules/tabix/tabix/main.nf' 
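Because the tabix/tabix module above now declares both index outputs as optional, only the index type matching the supplied arguments is populated on any given run; a minimal downstream sketch (process include path, channel names and file names are illustrative assumptions, not part of the patch):

    // Hypothetical consumer of the dual optional outputs added in this patch
    include { TABIX_TABIX } from './modules/tabix/tabix/main'

    workflow {
        ch_vcf   = Channel.of( [ [ id:'sample1' ], file('sample1.vcf.gz') ] )       // illustrative input
        TABIX_TABIX ( ch_vcf )
        ch_index = TABIX_TABIX.out.tbi.mix( TABIX_TABIX.out.csi )                   // only the index matching ext.args is emitted
    }
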
+include { TABIX_TABIX as TABIX_BED } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_GFF } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_VCF_TBI } from '../../../../modules/tabix/tabix/main.nf' +include { TABIX_TABIX as TABIX_VCF_CSI } from '../../../../modules/tabix/tabix/main.nf' workflow test_tabix_tabix_bed { input = [ [ id:'B.bed' ], // meta map @@ -22,10 +23,18 @@ workflow test_tabix_tabix_gff { TABIX_GFF ( input ) } -workflow test_tabix_tabix_vcf { +workflow test_tabix_tabix_vcf_tbi { input = [ [ id:'test.vcf' ], // meta map [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] ] - TABIX_VCF ( input ) + TABIX_VCF_TBI ( input ) +} + +workflow test_tabix_tabix_vcf_csi { + input = [ [ id:'test.vcf' ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_vcf_gz'], checkIfExists: true) ] + ] + + TABIX_VCF_CSI ( input ) } diff --git a/tests/modules/tabix/tabix/nextflow.config b/tests/modules/tabix/tabix/nextflow.config index aa97a873..139e0865 100644 --- a/tests/modules/tabix/tabix/nextflow.config +++ b/tests/modules/tabix/tabix/nextflow.config @@ -10,8 +10,12 @@ process { ext.args = '-p gff' } - withName: TABIX_VCF { + withName: TABIX_VCF_TBI { ext.args = '-p vcf' } + withName: TABIX_VCF_CSI { + ext.args = '-p vcf --csi' + } + } diff --git a/tests/modules/tabix/tabix/test.yml b/tests/modules/tabix/tabix/test.yml index 46be28dd..6d168ef5 100644 --- a/tests/modules/tabix/tabix/test.yml +++ b/tests/modules/tabix/tabix/test.yml @@ -15,10 +15,18 @@ - path: ./output/tabix/genome.gff3.gz.tbi md5sum: f79a67d95a98076e04fbe0455d825926 - name: tabix tabix vcf - command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf_tbi -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config tags: - tabix - tabix/tabix files: - path: output/tabix/test.vcf.gz.tbi md5sum: 36e11bf96ed0af4a92caa91a68612d64 +- name: tabix tabix vcf csi + command: nextflow run ./tests/modules/tabix/tabix -entry test_tabix_tabix_vcf_csi -c ./tests/config/nextflow.config -c ./tests/modules/tabix/tabix/nextflow.config + tags: + - tabix + - tabix/tabix + files: + - path: output/tabix/test.vcf.gz.csi + md5sum: 5f930522d2b9dcdba2807b7da4dfa3fd From 6678ad4426664076d912dab488ea641f44c1fdb6 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 14:54:20 +0200 Subject: [PATCH 208/283] Added a header to the VCF --- modules/vardictjava/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 3e036ebd..a28cb539 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -26,7 +26,7 @@ process VARDICTJAVA { """ vardict-java \\ $args \\ - -c 1 -S 2 -E 3 \\ + -c 1 -S 2 -E 3 -h \\ -b $bam \\ -th $task.cpus \\ -N $prefix \\ From bdff37c3d7c880f599af223504808306e75e2d76 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 15:33:27 +0200 Subject: [PATCH 209/283] Added the statistical tests and conversion to vcf --- modules/vardictjava/main.nf | 7 ++++++- tests/modules/vardictjava/nextflow.config | 1 + tests/modules/vardictjava/test.yml | 2 +- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index a28cb539..92051074 100644 --- 
a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -21,17 +21,22 @@ process VARDICTJAVA { script: def args = task.ext.args ?: '' + def args_conversion = task.ext.args_conversion ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ vardict-java \\ $args \\ - -c 1 -S 2 -E 3 -h \\ + -c 1 -S 2 -E 3 \\ -b $bam \\ -th $task.cpus \\ -N $prefix \\ -G $reference_fasta \\ $regions_of_interest \\ + | teststrandbias.R \\ + | var2vcf_valid.pl \\ + $args_conversion \\ + -N $prefix \\ | gzip -c > ${prefix}.vcf.gz cat <<-END_VERSIONS > versions.yml diff --git a/tests/modules/vardictjava/nextflow.config b/tests/modules/vardictjava/nextflow.config index 5dc176a9..fc698f59 100644 --- a/tests/modules/vardictjava/nextflow.config +++ b/tests/modules/vardictjava/nextflow.config @@ -4,5 +4,6 @@ process { withName: VARDICTJAVA { ext.args = '' + ext.args_conversion = '' } } \ No newline at end of file diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 77aa9047..549d688e 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -4,6 +4,6 @@ - vardictjava files: - path: output/vardictjava/test.vcf.gz - md5sum: 2179dcaee6183495b421293f42db11b5 + md5sum: 3f1f227afc532bddeb58f16fd3013fc8 - path: output/vardictjava/versions.yml md5sum: aac455b8c9c9194c5fed80e4fd495b96 From d626a1dcd3ca759c2c398efe49642a39d843ecd7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 15:41:43 +0200 Subject: [PATCH 210/283] Removed a whitespace in meta.yml... --- modules/vardictjava/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 39368f81..e3b2efe7 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -25,7 +25,7 @@ input: type: file description: BAM/SAM file pattern: "*.{bam,sam}" - + - reference_fasta: type: file description: FASTA of the reference genome From 4b04ca22ddc577951bc59a693e247c6ff5949dd7 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Thu, 28 Apr 2022 16:38:49 +0200 Subject: [PATCH 211/283] Variable name change for var2vcf conversion arguments --- modules/vardictjava/main.nf | 4 ++-- tests/modules/vardictjava/nextflow.config | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 92051074..08318c29 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -21,7 +21,7 @@ process VARDICTJAVA { script: def args = task.ext.args ?: '' - def args_conversion = task.ext.args_conversion ?: '' + def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" """ @@ -35,7 +35,7 @@ process VARDICTJAVA { $regions_of_interest \\ | teststrandbias.R \\ | var2vcf_valid.pl \\ - $args_conversion \\ + $args2 \\ -N $prefix \\ | gzip -c > ${prefix}.vcf.gz diff --git a/tests/modules/vardictjava/nextflow.config b/tests/modules/vardictjava/nextflow.config index fc698f59..e08201cc 100644 --- a/tests/modules/vardictjava/nextflow.config +++ b/tests/modules/vardictjava/nextflow.config @@ -4,6 +4,6 @@ process { withName: VARDICTJAVA { ext.args = '' - ext.args_conversion = '' + ext.args2 = '' } } \ No newline at end of file From c7408538746bd2c8d64418691c0e0440483c48da Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Thu, 28 Apr 2022 16:42:44 +0200 Subject: [PATCH 212/283] Antismash db output patch 1 (#3) * Update main.nf * Update meta.yml * Update test.yml --- 
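For the VARDICTJAVA changes above, ext.args is forwarded to vardict-java itself while ext.args2 is forwarded to var2vcf_valid.pl at the end of the pipe; a hypothetical pipeline-side override in the document's nextflow.config style (the flag values are assumptions for illustration, not part of the patch):

    // Hypothetical modules.config override; -f 0.01 is an assumed allele-frequency threshold
    process {
        withName: VARDICTJAVA {
            ext.args  = '-f 0.01'   // forwarded to vardict-java
            ext.args2 = '-f 0.01'   // forwarded to var2vcf_valid.pl
        }
    }
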
.../antismashlitedownloaddatabases/main.nf | 6 +++--- .../antismashlitedownloaddatabases/meta.yml | 18 ++++-------------- .../antismashlitedownloaddatabases/test.yml | 9 ++++----- 3 files changed, 11 insertions(+), 22 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index 72314eee..5f9141f0 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -27,9 +27,7 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { output: path("antismash_db") , emit: database - path("css"), emit: css_dir - path("detection"), emit: detection_dir - path("modules"), emit: modules_dir + path("antismash_dir"), emit: antismash_dir path "versions.yml", emit: versions when: @@ -42,6 +40,8 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { --database-dir antismash_db \\ $args + cp -r /usr/local/lib/python3.8/site-packages/antismash antismash_dir + cat <<-END_VERSIONS > versions.yml "${task.process}": antismash-lite: \$(antismash --version | sed 's/antiSMASH //') diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml index 619dc8c2..4b9644c4 100644 --- a/modules/antismash/antismashlitedownloaddatabases/meta.yml +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -27,7 +27,7 @@ input: - database_css: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by ther use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "css" - database_detection: type: directory @@ -50,21 +50,11 @@ output: type: directory description: Download directory for antiSMASH databases pattern: "antismash_db" - - css_dir: + - antismash_dir: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. - pattern: "css" - - detection_dir: - type: directory - description: | - antismash/detection folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. 
Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. - pattern: "detection" - - modules_dir: - type: directory - description: | - antismash/modules folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. - pattern: "modules" + antismash installation folder which is being modified during the antiSMASH database downloading step. The modified files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database and installation folder in pipelines. + pattern: "antismash_dir" authors: - "@jasmezz" diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index 1079363c..f9143163 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -1,17 +1,16 @@ - name: antismash antismashlitedownloaddatabases test_antismash_antismashlitedownloaddatabases command: nextflow run tests/modules/antismash/antismashlitedownloaddatabases -entry test_antismash_antismashlitedownloaddatabases -c tests/config/nextflow.config tags: - - antismash - antismash/antismashlitedownloaddatabases + - antismash files: - path: output/antismash/versions.yml - md5sum: 24859c67023abab99de295d3675a24b6 + md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare - path: output/antismash/antismash_db/pfam - path: output/antismash/antismash_db/resfam - path: output/antismash/antismash_db/tigrfam - - path: output/antismash/css - - path: output/antismash/detection - - path: output/antismash/modules + - path: output/antismash/antismash_dir + - path: output/antismash/antismash_dir/detection/hmm_detection/data/bgc_seeds.hmm From 39258e53d7b6168fc576d1d46dbd78c286919ec1 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Thu, 28 Apr 2022 17:09:40 +0200 Subject: [PATCH 213/283] Update checksum in test.yml --- tests/modules/antismash/antismashlitedownloaddatabases/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml index f9143163..b4a964a0 100644 --- a/tests/modules/antismash/antismashlitedownloaddatabases/test.yml +++ b/tests/modules/antismash/antismashlitedownloaddatabases/test.yml @@ -5,7 +5,7 @@ - antismash files: - path: output/antismash/versions.yml - md5sum: e2656c8d2bcc7469eba40eb1ee5c91b3 + md5sum: 24859c67023abab99de295d3675a24b6 - path: output/antismash/antismash_db - path: output/antismash/antismash_db/clusterblast - path: output/antismash/antismash_db/clustercompare From 
5be6afd6b1e9b4362ad9f3e43e4a93e0ef4af423 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Thu, 28 Apr 2022 12:29:39 -0400 Subject: [PATCH 214/283] changed container --- modules/srst2/srst2/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/srst2/srst2/main.nf b/modules/srst2/srst2/main.nf index fc665fad..5f69205d 100644 --- a/modules/srst2/srst2/main.nf +++ b/modules/srst2/srst2/main.nf @@ -4,8 +4,8 @@ process SRST2_SRST2 { conda (params.enable_conda ? "bioconda::srst2=0.2.0" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/srst2%3A0.2.0--py_4': - 'quay.io/biocontainers/srst2:0.2.0--py_4'}" + 'https://depot.galaxyproject.org/singularity/srst2%3A0.2.0--py27_2': + 'quay.io/biocontainers/srst2:0.2.0--py27_2'}" input: tuple val(meta), path(fastq_s), path(db) From e29a69077d470734cd721be8932a1e5b6bf64ff4 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Thu, 28 Apr 2022 12:30:04 -0400 Subject: [PATCH 215/283] add doi --- modules/srst2/srst2/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml index 31cf4de0..17d9226d 100644 --- a/modules/srst2/srst2/meta.yml +++ b/modules/srst2/srst2/meta.yml @@ -13,7 +13,7 @@ tools: homepage: { http://katholt.github.io/srst2/ } documentation: { https://github.com/katholt/srst2/blob/master/README.md } tool_dev_url: { https://github.com/katholt/srst2 } - doi: "" + doi: "10.1186/s13073-014-0090-6" licence: ["BSD"] input: From 9e7e10305b98e57f221de07d56bc7add2cfbd263 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Thu, 28 Apr 2022 14:02:28 -0400 Subject: [PATCH 216/283] Update modules/srst2/srst2/meta.yml Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/srst2/srst2/meta.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml index 17d9226d..f70d3780 100644 --- a/modules/srst2/srst2/meta.yml +++ b/modules/srst2/srst2/meta.yml @@ -10,9 +10,9 @@ keywords: tools: - srst2: description: "Short Read Sequence Typing for Bacterial Pathogens" - homepage: { http://katholt.github.io/srst2/ } - documentation: { https://github.com/katholt/srst2/blob/master/README.md } - tool_dev_url: { https://github.com/katholt/srst2 } + homepage: "http://katholt.github.io/srst2/" + documentation: "https://github.com/katholt/srst2/blob/master/README.md" + tool_dev_url: "https://github.com/katholt/srst2" doi: "10.1186/s13073-014-0090-6" licence: ["BSD"] From 842ae3982bd2238cbf5a6e50874c287962573cd9 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Thu, 28 Apr 2022 14:39:02 -0400 Subject: [PATCH 217/283] Update meta.yml --- modules/srst2/srst2/meta.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml index f70d3780..3755fb34 100644 --- a/modules/srst2/srst2/meta.yml +++ b/modules/srst2/srst2/meta.yml @@ -49,11 +49,11 @@ output: pattern: "versions.yml" - txt: type: file - description: results text file + description: A detailed report, with one row per gene per sample described here: https://github.com/katholt/srst2#gene-typing pattern: "*_fullgenes_*_results.txt" - txt: type: file - description: results text file + description: A tabulated summary report of samples x genes. 
pattern: "*_genes_*_results.txt" - bam: type: file From 218769d7df89c78ed253f4a5a70b436e4de17eba Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Thu, 28 Apr 2022 14:41:37 -0400 Subject: [PATCH 218/283] Update modules/srst2/srst2/main.nf Co-authored-by: Edmund Miller --- modules/srst2/srst2/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/srst2/srst2/main.nf b/modules/srst2/srst2/main.nf index 5f69205d..4ee9f6a6 100644 --- a/modules/srst2/srst2/main.nf +++ b/modules/srst2/srst2/main.nf @@ -29,7 +29,7 @@ process SRST2_SRST2 { } else if (meta.db=="mlst") { database = "--mlst_db ${db}" } else { - return + error "Please set meta.db to either \"gene\" or \"mlst\"" } """ srst2 \\ From 6ca6de91f8da9b8cc9a34f7ef6469eb0f851604d Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 07:47:24 +0200 Subject: [PATCH 219/283] bump picard version across the board --- modules/picard/addorreplacereadgroups/main.nf | 6 +++--- modules/picard/cleansam/main.nf | 6 +++--- modules/picard/collecthsmetrics/main.nf | 6 +++--- modules/picard/collectmultiplemetrics/main.nf | 6 +++--- modules/picard/collectwgsmetrics/main.nf | 6 +++--- modules/picard/createsequencedictionary/main.nf | 6 +++--- modules/picard/crosscheckfingerprints/main.nf | 6 +++--- modules/picard/filtersamreads/main.nf | 6 +++--- modules/picard/fixmateinformation/main.nf | 6 +++--- modules/picard/liftovervcf/main.nf | 6 +++--- modules/picard/markduplicates/main.nf | 6 +++--- modules/picard/mergesamfiles/main.nf | 6 +++--- modules/picard/sortsam/main.nf | 6 +++--- modules/picard/sortvcf/main.nf | 6 +++--- 14 files changed, 42 insertions(+), 42 deletions(-) diff --git a/modules/picard/addorreplacereadgroups/main.nf b/modules/picard/addorreplacereadgroups/main.nf index 8e1d10af..e395f4d7 100644 --- a/modules/picard/addorreplacereadgroups/main.nf +++ b/modules/picard/addorreplacereadgroups/main.nf @@ -2,10 +2,10 @@ process PICARD_ADDORREPLACEREADGROUPS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::picard=2.26.9" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.9--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/cleansam/main.nf b/modules/picard/cleansam/main.nf index fb435911..4cd4db58 100644 --- a/modules/picard/cleansam/main.nf +++ b/modules/picard/cleansam/main.nf @@ -2,10 +2,10 @@ process PICARD_CLEANSAM { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.9" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/picard:2.26.9--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index ef7a9b9f..26ba8a04 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTHSMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index 340463a8..0c79b7a5 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTMULTIPLEMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index f4efaa4c..ef53d7bd 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -2,10 +2,10 @@ process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam), path(bai) diff --git a/modules/picard/createsequencedictionary/main.nf b/modules/picard/createsequencedictionary/main.nf index 96069e9f..3a8f1477 100644 --- a/modules/picard/createsequencedictionary/main.nf +++ b/modules/picard/createsequencedictionary/main.nf @@ -2,10 +2,10 @@ process PICARD_CREATESEQUENCEDICTIONARY { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.9" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/picard:2.26.9--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(fasta) diff --git a/modules/picard/crosscheckfingerprints/main.nf b/modules/picard/crosscheckfingerprints/main.nf index b3dface5..d3b59402 100644 --- a/modules/picard/crosscheckfingerprints/main.nf +++ b/modules/picard/crosscheckfingerprints/main.nf @@ -2,10 +2,10 @@ process PICARD_CROSSCHECKFINGERPRINTS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(input1) diff --git a/modules/picard/filtersamreads/main.nf b/modules/picard/filtersamreads/main.nf index ab75abfa..adedcdc2 100644 --- a/modules/picard/filtersamreads/main.nf +++ b/modules/picard/filtersamreads/main.nf @@ -2,10 +2,10 @@ process PICARD_FILTERSAMREADS { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam), path(readlist) diff --git a/modules/picard/fixmateinformation/main.nf b/modules/picard/fixmateinformation/main.nf index 763f3bb4..1993e4fd 100644 --- a/modules/picard/fixmateinformation/main.nf +++ b/modules/picard/fixmateinformation/main.nf @@ -2,10 +2,10 @@ process PICARD_FIXMATEINFORMATION { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::picard=2.26.9" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.9--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.9--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/liftovervcf/main.nf b/modules/picard/liftovervcf/main.nf index cdbd637e..7ce694e9 100644 --- a/modules/picard/liftovervcf/main.nf +++ b/modules/picard/liftovervcf/main.nf @@ -2,10 +2,10 @@ process PICARD_LIFTOVERVCF { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(input_vcf) diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index e754a587..b9fec576 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -2,10 +2,10 @@ process PICARD_MARKDUPLICATES { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index ef5c3980..7b0185cd 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -2,10 +2,10 @@ process PICARD_MERGESAMFILES { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bams) diff --git a/modules/picard/sortsam/main.nf b/modules/picard/sortsam/main.nf index adec17cb..cee60fd1 100644 --- a/modules/picard/sortsam/main.nf +++ b/modules/picard/sortsam/main.nf @@ -2,10 +2,10 @@ process PICARD_SORTSAM { tag "$meta.id" label 'process_low' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(bam) diff --git a/modules/picard/sortvcf/main.nf b/modules/picard/sortvcf/main.nf index 4047545e..5fe0ecfd 100644 --- a/modules/picard/sortvcf/main.nf +++ b/modules/picard/sortvcf/main.nf @@ -2,10 +2,10 @@ process PICARD_SORTVCF { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: tuple val(meta), path(vcf) From 00467ac64affbbc17b255762f10a6c8fa362aabc Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 07:57:11 +0200 Subject: [PATCH 220/283] update command line syntax for all tools --- modules/picard/addorreplacereadgroups/main.nf | 2 +- modules/picard/collecthsmetrics/main.nf | 8 ++++---- modules/picard/collectmultiplemetrics/main.nf | 6 +++--- modules/picard/collectwgsmetrics/main.nf | 6 +++--- modules/picard/createsequencedictionary/main.nf | 4 ++-- modules/picard/liftovervcf/main.nf | 10 +++++----- modules/picard/markduplicates/main.nf | 6 +++--- 7 files changed, 21 insertions(+), 21 deletions(-) diff --git a/modules/picard/addorreplacereadgroups/main.nf b/modules/picard/addorreplacereadgroups/main.nf index e395f4d7..55200d2e 100644 --- a/modules/picard/addorreplacereadgroups/main.nf +++ b/modules/picard/addorreplacereadgroups/main.nf @@ -43,7 +43,7 @@ process PICARD_ADDORREPLACEREADGROUPS { -PL ${PLATFORM} \\ -PU ${BARCODE} \\ -SM ${SAMPLE} \\ - -CREATE_INDEX true + --CREATE_INDEX true cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/collecthsmetrics/main.nf b/modules/picard/collecthsmetrics/main.nf index 26ba8a04..317aff4b 100644 --- a/modules/picard/collecthsmetrics/main.nf +++ b/modules/picard/collecthsmetrics/main.nf @@ -38,10 +38,10 @@ process PICARD_COLLECTHSMETRICS { CollectHsMetrics \\ $args \\ $reference \\ - -BAIT_INTERVALS $bait_intervals \\ - -TARGET_INTERVALS $target_intervals \\ - -INPUT $bam \\ - -OUTPUT ${prefix}.CollectHsMetrics.coverage_metrics + --BAIT_INTERVALS $bait_intervals \\ + --TARGET_INTERVALS $target_intervals \\ + --INPUT $bam \\ + --OUTPUT ${prefix}.CollectHsMetrics.coverage_metrics cat <<-END_VERSIONS > versions.yml diff --git a/modules/picard/collectmultiplemetrics/main.nf b/modules/picard/collectmultiplemetrics/main.nf index 0c79b7a5..a653b549 100644 --- a/modules/picard/collectmultiplemetrics/main.nf +++ b/modules/picard/collectmultiplemetrics/main.nf @@ -33,9 +33,9 @@ process PICARD_COLLECTMULTIPLEMETRICS { -Xmx${avail_mem}g \\ CollectMultipleMetrics \\ $args \\ - INPUT=$bam \\ - OUTPUT=${prefix}.CollectMultipleMetrics \\ - REFERENCE_SEQUENCE=$fasta + --INPUT $bam \\ + --OUTPUT ${prefix}.CollectMultipleMetrics \\ + --REFERENCE_SEQUENCE $fasta cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index ef53d7bd..957c8058 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -32,9 +32,9 @@ process PICARD_COLLECTWGSMETRICS { -Xmx${avail_mem}g \\ CollectWgsMetrics \\ $args \\ - INPUT=$bam \\ - OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ - REFERENCE_SEQUENCE=$fasta + --INPUT $bam \\ + --OUTPUT ${prefix}.CollectWgsMetrics.coverage_metrics \\ + --REFERENCE_SEQUENCE $fasta cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/createsequencedictionary/main.nf b/modules/picard/createsequencedictionary/main.nf index 3a8f1477..3a8bb62a 100644 --- a/modules/picard/createsequencedictionary/main.nf +++ b/modules/picard/createsequencedictionary/main.nf @@ -31,8 +31,8 @@ process 
PICARD_CREATESEQUENCEDICTIONARY { -Xmx${avail_mem}g \\ CreateSequenceDictionary \\ $args \\ - R=$fasta \\ - O=${prefix}.dict + -R $fasta \\ + -O ${prefix}.dict cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/liftovervcf/main.nf b/modules/picard/liftovervcf/main.nf index 7ce694e9..ea3f4cb7 100644 --- a/modules/picard/liftovervcf/main.nf +++ b/modules/picard/liftovervcf/main.nf @@ -35,11 +35,11 @@ process PICARD_LIFTOVERVCF { -Xmx${avail_mem}g \\ LiftoverVcf \\ $args \\ - I=$input_vcf \\ - O=${prefix}.lifted.vcf.gz \\ - CHAIN=$chain \\ - REJECT=${prefix}.unlifted.vcf.gz \\ - R=$fasta + -I $input_vcf \\ + -O ${prefix}.lifted.vcf.gz \\ + --CHAIN $chain \\ + --REJECT ${prefix}.unlifted.vcf.gz \\ + -R $fasta cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index b9fec576..58b6b88a 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -33,9 +33,9 @@ process PICARD_MARKDUPLICATES { -Xmx${avail_mem}g \\ MarkDuplicates \\ $args \\ - I=$bam \\ - O=${prefix}.bam \\ - M=${prefix}.MarkDuplicates.metrics.txt + -I $bam \\ + -O ${prefix}.bam \\ + -M ${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml "${task.process}": From b72ec05d69140c6a9c6054175d1412b2f116466f Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 08:19:42 +0200 Subject: [PATCH 221/283] drop md5sum from versions.yml --- tests/modules/picard/addorreplacereadgroups/test.yml | 1 - tests/modules/picard/cleansam/test.yml | 1 - tests/modules/picard/createsequencedictionary/test.yml | 1 - 3 files changed, 3 deletions(-) diff --git a/tests/modules/picard/addorreplacereadgroups/test.yml b/tests/modules/picard/addorreplacereadgroups/test.yml index aa1536bb..6ee81737 100644 --- a/tests/modules/picard/addorreplacereadgroups/test.yml +++ b/tests/modules/picard/addorreplacereadgroups/test.yml @@ -7,4 +7,3 @@ - path: output/picard/test.bam md5sum: 7b82f3461c2d80fc6a10385e78c9427f - path: output/picard/versions.yml - md5sum: 8a2d176295e1343146ea433c79bb517f diff --git a/tests/modules/picard/cleansam/test.yml b/tests/modules/picard/cleansam/test.yml index 3b235d07..08dcd84d 100644 --- a/tests/modules/picard/cleansam/test.yml +++ b/tests/modules/picard/cleansam/test.yml @@ -7,4 +7,3 @@ - path: output/picard/test.bam md5sum: a48f8e77a1480445efc57570c3a38a68 - path: output/picard/versions.yml - md5sum: e6457d7c6de51bf6f4b577eda65e57ac diff --git a/tests/modules/picard/createsequencedictionary/test.yml b/tests/modules/picard/createsequencedictionary/test.yml index 2a43be41..59f2dd44 100644 --- a/tests/modules/picard/createsequencedictionary/test.yml +++ b/tests/modules/picard/createsequencedictionary/test.yml @@ -7,4 +7,3 @@ - path: output/picard/test.dict contains: ["SN:MT192765.1"] - path: output/picard/versions.yml - md5sum: b3d8c7ea65b8a6d3237b153d13fe2014 From ec10f98d558a84da79ba7c90c2d57f6c6f0f7af5 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 09:32:32 +0200 Subject: [PATCH 222/283] picard CollectWgsMetrics: update to new cli args (#1578) * update to new cli args * remove bam index from tuple, not needed by program * bump picard version Co-authored-by: Sateesh Peri <33637490+sateeshperi@users.noreply.github.com> --- modules/picard/collectwgsmetrics/main.nf | 14 +++++++------- tests/modules/picard/collectwgsmetrics/main.nf | 3 +-- 
2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/modules/picard/collectwgsmetrics/main.nf b/modules/picard/collectwgsmetrics/main.nf index f4efaa4c..d9c1ec7c 100644 --- a/modules/picard/collectwgsmetrics/main.nf +++ b/modules/picard/collectwgsmetrics/main.nf @@ -2,13 +2,13 @@ process PICARD_COLLECTWGSMETRICS { tag "$meta.id" label 'process_medium' - conda (params.enable_conda ? "bioconda::picard=2.26.10" : null) + conda (params.enable_conda ? "bioconda::picard=2.27.1" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/picard:2.26.10--hdfd78af_0' : - 'quay.io/biocontainers/picard:2.26.10--hdfd78af_0' }" + 'https://depot.galaxyproject.org/singularity/picard:2.27.1--hdfd78af_0' : + 'quay.io/biocontainers/picard:2.27.1--hdfd78af_0' }" input: - tuple val(meta), path(bam), path(bai) + tuple val(meta), path(bam) path fasta output: @@ -32,9 +32,9 @@ process PICARD_COLLECTWGSMETRICS { -Xmx${avail_mem}g \\ CollectWgsMetrics \\ $args \\ - INPUT=$bam \\ - OUTPUT=${prefix}.CollectWgsMetrics.coverage_metrics \\ - REFERENCE_SEQUENCE=$fasta + -INPUT $bam \\ + -OUTPUT ${prefix}.CollectWgsMetrics.coverage_metrics \\ + -REFERENCE_SEQUENCE $fasta cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/tests/modules/picard/collectwgsmetrics/main.nf b/tests/modules/picard/collectwgsmetrics/main.nf index 1d75a2bd..eddf5603 100644 --- a/tests/modules/picard/collectwgsmetrics/main.nf +++ b/tests/modules/picard/collectwgsmetrics/main.nf @@ -6,8 +6,7 @@ include { PICARD_COLLECTWGSMETRICS } from '../../../../modules/picard/collectwgs workflow test_picard_collectwgsmetrics { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam_bai'], checkIfExists: true) + file(params.test_data['sarscov2']['illumina']['test_paired_end_sorted_bam'], checkIfExists: true), ] fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) From 1a5a9e7b4009dcf34e6867dd1a5a1d9a718b027b Mon Sep 17 00:00:00 2001 From: Lauri Mesilaakso Date: Fri, 29 Apr 2022 09:37:23 +0200 Subject: [PATCH 223/283] Fix typo (#1586) --- modules/minimap2/align/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/minimap2/align/main.nf b/modules/minimap2/align/main.nf index 7ba05ee9..08ac6eef 100644 --- a/modules/minimap2/align/main.nf +++ b/modules/minimap2/align/main.nf @@ -27,8 +27,8 @@ process MINIMAP2_ALIGN { def prefix = task.ext.prefix ?: "${meta.id}" def input_reads = meta.single_end ? "$reads" : "${reads[0]} ${reads[1]}" def bam_output = bam_format ? "-a | samtools sort | samtools view -@ ${task.cpus} -b -h -o ${prefix}.bam" : "-o ${prefix}.paf" - def cigar_paf = cigar_paf_format && !sam_format ? "-c" : '' - def set_cigar_bam = cigar_bam && sam_format ? "-L" : '' + def cigar_paf = cigar_paf_format && !bam_format ? "-c" : '' + def set_cigar_bam = cigar_bam && bam_format ? 
"-L" : '' """ minimap2 \\ $args \\ From 0e01d703ea55899bd048689258d22c153d39b981 Mon Sep 17 00:00:00 2001 From: CMGG ICT Team Date: Fri, 29 Apr 2022 10:12:52 +0200 Subject: [PATCH 224/283] remove version md5sum from fixmateinformation --- tests/modules/picard/fixmateinformation/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/modules/picard/fixmateinformation/test.yml b/tests/modules/picard/fixmateinformation/test.yml index f12f823b..f2f9c491 100644 --- a/tests/modules/picard/fixmateinformation/test.yml +++ b/tests/modules/picard/fixmateinformation/test.yml @@ -7,4 +7,3 @@ - path: output/picard/test.bam md5sum: 746102e8c242c0ef42e045c49d320030 - path: output/picard/versions.yml - md5sum: 4329ba7cdca8f4f6018dfd5c019ba2eb From bda132191db9548cd1fc1c594399cba583bcd061 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 10:13:22 +0200 Subject: [PATCH 225/283] fix args in nextflow.config --- tests/modules/picard/markduplicates/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/picard/markduplicates/nextflow.config b/tests/modules/picard/markduplicates/nextflow.config index 9178c5b1..40d46110 100644 --- a/tests/modules/picard/markduplicates/nextflow.config +++ b/tests/modules/picard/markduplicates/nextflow.config @@ -3,7 +3,7 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } withName: PICARD_MARKDUPLICATES_UNSORTED { - ext.args = 'ASSUME_SORT_ORDER=queryname' + ext.args = '--ASSUME_SORT_ORDER queryname' } } From b2e4342105d4e6b06edd33ed2613328eb740b4b8 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 10:21:22 +0200 Subject: [PATCH 226/283] fix args in nextflow.config --- tests/modules/picard/liftovervcf/nextflow.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/picard/liftovervcf/nextflow.config b/tests/modules/picard/liftovervcf/nextflow.config index e1581bb9..f69fc351 100644 --- a/tests/modules/picard/liftovervcf/nextflow.config +++ b/tests/modules/picard/liftovervcf/nextflow.config @@ -1,5 +1,5 @@ process { - ext.args = "WARN_ON_MISSING_CONTIG=true" + ext.args = "--WARN_ON_MISSING_CONTIG true" publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } } From 42564565b934eeb2449e35ec97ed13ff2a67f1de Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Fri, 29 Apr 2022 10:26:17 +0200 Subject: [PATCH 227/283] Update diamond/blastx diamond/blastp to support all possible output formats --- modules/diamond/blastp/main.nf | 17 ++++++++++++++--- modules/diamond/blastp/meta.yml | 9 +++++++++ modules/diamond/blastx/main.nf | 17 ++++++++++++++--- modules/diamond/blastx/meta.yml | 9 +++++++++ tests/modules/diamond/blastp/main.nf | 13 ++++++++++++- tests/modules/diamond/blastp/test.yml | 21 ++++++++++++++++++--- tests/modules/diamond/blastx/main.nf | 13 ++++++++++++- tests/modules/diamond/blastx/test.yml | 24 ++++++++++++++++++++---- 8 files changed, 108 insertions(+), 15 deletions(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 955952dc..d7c53d6f 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -11,10 +11,11 @@ process DIAMOND_BLASTP { input: tuple val(meta), path(fasta) - path db + path db + val outext output: - tuple val(meta), path('*.txt'), emit: txt + tuple val(meta), 
path('*.{blast,xml,txt,daa,sam,tsv,paf}'), emit: output path "versions.yml" , emit: versions when: @@ -23,6 +24,15 @@ process DIAMOND_BLASTP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + switch ( outext ) { + case "blast": outfmt = 0; break + case "xml": outfmt = 5; break + case "txt": outfmt = 6; break + case "daa": outfmt = 100; break + case "sam": outfmt = 101; break + case "tsv": outfmt = 102; break + case "paf": outfmt = 103; break + } """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -31,8 +41,9 @@ process DIAMOND_BLASTP { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ + --outfmt ${outfmt} \\ $args \\ - --out ${prefix}.txt + --out ${prefix}.${outext} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml index 8ac1d371..1aa36c23 100644 --- a/modules/diamond/blastp/meta.yml +++ b/modules/diamond/blastp/meta.yml @@ -28,6 +28,14 @@ input: type: directory description: Directory containing the protein blast database pattern: "*" + - outext: + type: string + description: | + Specify the type of output file to be generated. `blast` corresponds to + BLAST pairwise format. `xml` corresponds to BLAST xml format. + `txt` corresponds to to BLAST tabular format. `tsv` corresponds to + taxonomic classification format. + pattern: "blast|xml|txt|daa|sam|tsv|paf" output: - txt: @@ -41,3 +49,4 @@ output: authors: - "@spficklin" + - "@jfy133" diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index 3700bd36..6703c1e4 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -11,10 +11,11 @@ process DIAMOND_BLASTX { input: tuple val(meta), path(fasta) - path db + path db + val outext output: - tuple val(meta), path('*.txt'), emit: txt + tuple val(meta), path('*.{blast,xml,txt,daa,sam,tsv,paf}'), emit: output path "versions.yml" , emit: versions when: @@ -23,6 +24,15 @@ process DIAMOND_BLASTX { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + switch ( outext ) { + case "blast": outfmt = 0; break + case "xml": outfmt = 5; break + case "txt": outfmt = 6; break + case "daa": outfmt = 100; break + case "sam": outfmt = 101; break + case "tsv": outfmt = 102; break + case "paf": outfmt = 103; break + } """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` @@ -31,8 +41,9 @@ process DIAMOND_BLASTX { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ + --outfmt ${outfmt} \\ $args \\ - --out ${prefix}.txt + --out ${prefix}.${outext} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml index 7eb3d7b4..5ee2d55e 100644 --- a/modules/diamond/blastx/meta.yml +++ b/modules/diamond/blastx/meta.yml @@ -28,6 +28,14 @@ input: type: directory description: Directory containing the nucelotide blast database pattern: "*" + - outext: + type: string + description: | + Specify the type of output file to be generated. `blast` corresponds to + BLAST pairwise format. `xml` corresponds to BLAST xml format. + `txt` corresponds to to BLAST tabular format. `tsv` corresponds to + taxonomic classification format. 
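As context for the new outext input documented above: a minimal sketch of how a calling workflow could drive it, following the pattern used by the accompanying tests. The include paths and params names here are hypothetical and only for illustration; outext must be one of blast, xml, txt, daa, sam, tsv or paf.

include { DIAMOND_MAKEDB } from './modules/diamond/makedb/main'
include { DIAMOND_BLASTP } from './modules/diamond/blastp/main'

workflow {
    // hypothetical params pointing at a reference proteome and a query FASTA
    db_fasta = file(params.diamond_db_fasta, checkIfExists: true)
    query    = file(params.query_fasta, checkIfExists: true)
    outext   = 'txt'

    DIAMOND_MAKEDB ( [ db_fasta ] )
    DIAMOND_BLASTP ( [ [ id:'sample1' ], query ], DIAMOND_MAKEDB.out.db, outext )
}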
+ pattern: "blast|xml|txt|daa|sam|tsv|paf" output: - txt: @@ -41,3 +49,4 @@ output: authors: - "@spficklin" + - "@jfy133" diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index 87d05bf9..5c1459d8 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -9,7 +9,18 @@ workflow test_diamond_blastp { db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + outext = 'txt' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) +} + +workflow test_diamond_blastp_daa { + + db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + outext = 'daa' + + DIAMOND_MAKEDB ( db ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) } diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index 32dfacaa..5e31df1f 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -1,8 +1,23 @@ -- name: diamond blastp - command: nextflow run ./tests/modules/diamond/blastp -entry test_diamond_blastp -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastp/nextflow.config +- name: diamond blastp test_diamond_blastp + command: nextflow run tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config tags: - diamond - diamond/blastp files: - - path: ./output/diamond/test.diamond_blastp.txt + - path: output/diamond/genome.fasta.dmnd + md5sum: 2447fb376394c20d43ea3aad2aa5d15d + - path: output/diamond/test.diamond_blastp.txt md5sum: 3ca7f6290c1d8741c573370e6f8b4db0 + - path: output/diamond/versions.yml + +- name: diamond blastp test_diamond_blastp_daa + command: nextflow run tests/modules/diamond/blastp -entry test_diamond_blastp_daa -c tests/config/nextflow.config + tags: + - diamond + - diamond/blastp + files: + - path: output/diamond/genome.fasta.dmnd + md5sum: 2447fb376394c20d43ea3aad2aa5d15d + - path: output/diamond/test.diamond_blastp.daa + md5sum: d4a79ad1fcb2ec69460e5a09a9468db7 + - path: output/diamond/versions.yml diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index 77eb08ea..d6d5a77a 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -9,7 +9,18 @@ workflow test_diamond_blastx { db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + outext = 'txt' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) +} + +workflow test_diamond_blastx_daa { + + db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + outext = 'daa' + + DIAMOND_MAKEDB ( db ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) } diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index fe7c6938..6a792f30 100644 --- 
a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -1,8 +1,24 @@ -- name: diamond blastx - command: nextflow run ./tests/modules/diamond/blastx -entry test_diamond_blastx -c ./tests/config/nextflow.config -c ./tests/modules/diamond/blastx/nextflow.config +- name: diamond blastx test_diamond_blastx + command: nextflow run tests/modules/diamond/blastx -entry test_diamond_blastx -c tests/config/nextflow.config tags: - diamond - diamond/blastx files: - - path: ./output/diamond/test.diamond_blastx.txt - md5sum: d41d8cd98f00b204e9800998ecf8427e + - path: output/diamond/genome.fasta.dmnd + md5sum: 2447fb376394c20d43ea3aad2aa5d15d + - path: output/diamond/test.diamond_blastx.txt + - path: output/diamond/versions.yml + md5sum: 747934f57b7c0f8901570500f206eac6 + +- name: diamond blastx test_diamond_blastx_daa + command: nextflow run tests/modules/diamond/blastx -entry test_diamond_blastx_daa -c tests/config/nextflow.config + tags: + - diamond + - diamond/blastx + files: + - path: output/diamond/genome.fasta.dmnd + md5sum: 2447fb376394c20d43ea3aad2aa5d15d + - path: output/diamond/test.diamond_blastx.daa + md5sum: 2a0ce0f7e01dcead828b87d5cbaccf7a + - path: output/diamond/versions.yml + md5sum: 05cbabfd500fc17e26b3d8061c5a78c3 From 7df2fae7469095b2f211f4e183cd5f7de77a2b98 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Fri, 29 Apr 2022 10:26:38 +0200 Subject: [PATCH 228/283] Clean up test data --- tests/modules/diamond/blastp/test.yml | 4 ---- tests/modules/diamond/blastx/test.yml | 6 ------ 2 files changed, 10 deletions(-) diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index 5e31df1f..cb297743 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -4,8 +4,6 @@ - diamond - diamond/blastp files: - - path: output/diamond/genome.fasta.dmnd - md5sum: 2447fb376394c20d43ea3aad2aa5d15d - path: output/diamond/test.diamond_blastp.txt md5sum: 3ca7f6290c1d8741c573370e6f8b4db0 - path: output/diamond/versions.yml @@ -16,8 +14,6 @@ - diamond - diamond/blastp files: - - path: output/diamond/genome.fasta.dmnd - md5sum: 2447fb376394c20d43ea3aad2aa5d15d - path: output/diamond/test.diamond_blastp.daa md5sum: d4a79ad1fcb2ec69460e5a09a9468db7 - path: output/diamond/versions.yml diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index 6a792f30..a2d35c80 100644 --- a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -4,11 +4,8 @@ - diamond - diamond/blastx files: - - path: output/diamond/genome.fasta.dmnd - md5sum: 2447fb376394c20d43ea3aad2aa5d15d - path: output/diamond/test.diamond_blastx.txt - path: output/diamond/versions.yml - md5sum: 747934f57b7c0f8901570500f206eac6 - name: diamond blastx test_diamond_blastx_daa command: nextflow run tests/modules/diamond/blastx -entry test_diamond_blastx_daa -c tests/config/nextflow.config @@ -16,9 +13,6 @@ - diamond - diamond/blastx files: - - path: output/diamond/genome.fasta.dmnd - md5sum: 2447fb376394c20d43ea3aad2aa5d15d - path: output/diamond/test.diamond_blastx.daa md5sum: 2a0ce0f7e01dcead828b87d5cbaccf7a - path: output/diamond/versions.yml - md5sum: 05cbabfd500fc17e26b3d8061c5a78c3 From 20ebb89ff97a2665106be9cace5ccb9aa4eed1be Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 10:52:40 +0200 Subject: [PATCH 229/283] update to long args --- modules/picard/addorreplacereadgroups/main.nf | 10 +++++----- 
modules/picard/cleansam/main.nf | 4 ++-- modules/picard/createsequencedictionary/main.nf | 4 ++-- modules/picard/fixmateinformation/main.nf | 4 ++-- modules/picard/liftovervcf/main.nf | 6 +++--- modules/picard/markduplicates/main.nf | 6 +++--- modules/picard/mergesamfiles/main.nf | 4 ++-- modules/picard/sortvcf/main.nf | 4 ++-- 8 files changed, 21 insertions(+), 21 deletions(-) diff --git a/modules/picard/addorreplacereadgroups/main.nf b/modules/picard/addorreplacereadgroups/main.nf index 55200d2e..fd102f67 100644 --- a/modules/picard/addorreplacereadgroups/main.nf +++ b/modules/picard/addorreplacereadgroups/main.nf @@ -38,11 +38,11 @@ process PICARD_ADDORREPLACEREADGROUPS { -Xmx${avail_mem}g \\ --INPUT ${bam} \\ --OUTPUT ${prefix}.bam \\ - -ID ${ID} \\ - -LB ${LIBRARY} \\ - -PL ${PLATFORM} \\ - -PU ${BARCODE} \\ - -SM ${SAMPLE} \\ + --RGID ${ID} \\ + --RGLB ${LIBRARY} \\ + --RGPL ${PLATFORM} \\ + --RGPU ${BARCODE} \\ + --RGSM ${SAMPLE} \\ --CREATE_INDEX true cat <<-END_VERSIONS > versions.yml diff --git a/modules/picard/cleansam/main.nf b/modules/picard/cleansam/main.nf index 4cd4db58..62989565 100644 --- a/modules/picard/cleansam/main.nf +++ b/modules/picard/cleansam/main.nf @@ -31,8 +31,8 @@ process PICARD_CLEANSAM { -Xmx${avail_mem}g \\ CleanSam \\ ${args} \\ - -I ${bam} \\ - -O ${prefix}.bam + --INPUT ${bam} \\ + --OUTPUT ${prefix}.bam cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/createsequencedictionary/main.nf b/modules/picard/createsequencedictionary/main.nf index 3a8bb62a..735cc97b 100644 --- a/modules/picard/createsequencedictionary/main.nf +++ b/modules/picard/createsequencedictionary/main.nf @@ -31,8 +31,8 @@ process PICARD_CREATESEQUENCEDICTIONARY { -Xmx${avail_mem}g \\ CreateSequenceDictionary \\ $args \\ - -R $fasta \\ - -O ${prefix}.dict + --REFERENCE_SEQUENCE $fasta \\ + --OUTPUT ${prefix}.dict cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/fixmateinformation/main.nf b/modules/picard/fixmateinformation/main.nf index 1993e4fd..539b1082 100644 --- a/modules/picard/fixmateinformation/main.nf +++ b/modules/picard/fixmateinformation/main.nf @@ -31,8 +31,8 @@ process PICARD_FIXMATEINFORMATION { picard \\ FixMateInformation \\ -Xmx${avail_mem}g \\ - -I ${bam} \\ - -O ${prefix}.bam \\ + --INPUT ${bam} \\ + --OUTPUT ${prefix}.bam \\ --VALIDATION_STRINGENCY ${STRINGENCY} cat <<-END_VERSIONS > versions.yml diff --git a/modules/picard/liftovervcf/main.nf b/modules/picard/liftovervcf/main.nf index ea3f4cb7..c92abfeb 100644 --- a/modules/picard/liftovervcf/main.nf +++ b/modules/picard/liftovervcf/main.nf @@ -35,11 +35,11 @@ process PICARD_LIFTOVERVCF { -Xmx${avail_mem}g \\ LiftoverVcf \\ $args \\ - -I $input_vcf \\ - -O ${prefix}.lifted.vcf.gz \\ + --INPUT $input_vcf \\ + --OUTPUT ${prefix}.lifted.vcf.gz \\ --CHAIN $chain \\ --REJECT ${prefix}.unlifted.vcf.gz \\ - -R $fasta + --REFERENCE_SEQUENCE $fasta cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/picard/markduplicates/main.nf b/modules/picard/markduplicates/main.nf index 58b6b88a..1565c647 100644 --- a/modules/picard/markduplicates/main.nf +++ b/modules/picard/markduplicates/main.nf @@ -33,9 +33,9 @@ process PICARD_MARKDUPLICATES { -Xmx${avail_mem}g \\ MarkDuplicates \\ $args \\ - -I $bam \\ - -O ${prefix}.bam \\ - -M ${prefix}.MarkDuplicates.metrics.txt + --INPUT $bam \\ + --OUTPUT ${prefix}.bam \\ + --METRICS_FILE ${prefix}.MarkDuplicates.metrics.txt cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git 
a/modules/picard/mergesamfiles/main.nf b/modules/picard/mergesamfiles/main.nf index 7b0185cd..1e32c63a 100644 --- a/modules/picard/mergesamfiles/main.nf +++ b/modules/picard/mergesamfiles/main.nf @@ -33,8 +33,8 @@ process PICARD_MERGESAMFILES { -Xmx${avail_mem}g \\ MergeSamFiles \\ $args \\ - ${'INPUT='+bam_files.join(' INPUT=')} \\ - OUTPUT=${prefix}.bam + ${'--INPUT '+bam_files.join(' --INPUT ')} \\ + --OUTPUT ${prefix}.bam cat <<-END_VERSIONS > versions.yml "${task.process}": picard: \$( echo \$(picard MergeSamFiles --version 2>&1) | grep -o 'Version:.*' | cut -f2- -d:) diff --git a/modules/picard/sortvcf/main.nf b/modules/picard/sortvcf/main.nf index 5fe0ecfd..fb8dbb79 100644 --- a/modules/picard/sortvcf/main.nf +++ b/modules/picard/sortvcf/main.nf @@ -22,8 +22,8 @@ process PICARD_SORTVCF { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def seq_dict = sequence_dict ? "-SEQUENCE_DICTIONARY $sequence_dict" : "" - def reference = reference ? "-REFERENCE_SEQUENCE $reference" : "" + def seq_dict = sequence_dict ? "--SEQUENCE_DICTIONARY $sequence_dict" : "" + def reference = reference ? "--REFERENCE_SEQUENCE $reference" : "" def avail_mem = 3 if (!task.memory) { log.info '[Picard SortVcf] Available memory not known - defaulting to 3GB. Specify process memory requirements to change this.' From 771d5a19f1cf72f663df27dd396e863193bc0f04 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 11:12:24 +0200 Subject: [PATCH 230/283] fix reference arg --- modules/picard/createsequencedictionary/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/picard/createsequencedictionary/main.nf b/modules/picard/createsequencedictionary/main.nf index 735cc97b..2348c496 100644 --- a/modules/picard/createsequencedictionary/main.nf +++ b/modules/picard/createsequencedictionary/main.nf @@ -31,7 +31,7 @@ process PICARD_CREATESEQUENCEDICTIONARY { -Xmx${avail_mem}g \\ CreateSequenceDictionary \\ $args \\ - --REFERENCE_SEQUENCE $fasta \\ + --REFERENCE $fasta \\ --OUTPUT ${prefix}.dict cat <<-END_VERSIONS > versions.yml From 73600b03393ee2ec585bdc6bb5ff2585fc47bdda Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 14:20:15 +0200 Subject: [PATCH 231/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. 
Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index b8be04b2..c3a441bc 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -22,12 +22,8 @@ workflow BAM_QC_PICARD { ch_versions = ch_versions.mix(PICARD_COLLECTMULTIPLEMETRICS.out.versions.first()) if (ch_bait_interval || ch_target_interval) { - if (ch_bait_interval.isEmpty()) { - log.error("Bait interval channel is empty") - } - if (ch_target_interval.isEmpty()) { - log.error("Target interval channel is empty") - } + if (!ch_bait_interval) log.error("Bait interval channel is empty") + if (!ch_target_interval) log.error("Target interval channel is empty") PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_fasta_fai, ch_bait_interval, ch_target_interval ) ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.metrics.first()) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) From 71ccf0e207d2b034c931d2264b33f5f54abe1f59 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 14:24:19 +0200 Subject: [PATCH 232/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index c3a441bc..e3c890d5 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -25,7 +25,7 @@ workflow BAM_QC_PICARD { if (!ch_bait_interval) log.error("Bait interval channel is empty") if (!ch_target_interval) log.error("Target interval channel is empty") PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_fasta_fai, ch_bait_interval, ch_target_interval ) - ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.metrics.first()) + ch_coverage_metrics = PICARD_COLLECTHSMETRICS.out.metrics ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) From 20e3767657ba1a4b87a412121321e79ca109b401 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 14:24:25 +0200 Subject: [PATCH 233/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index e3c890d5..5f46e065 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -30,7 +30,7 @@ workflow BAM_QC_PICARD { } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first()) - ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.metrics.first()) + ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.metrics) } emit: From 343c0ebe203e53ca9149ec473a8396836039a516 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 14:26:00 +0200 Subject: [PATCH 234/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. 
Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 5f46e065..76709ea6 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -30,7 +30,7 @@ workflow BAM_QC_PICARD { } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) ch_versions = ch_versions.mix(PICARD_COLLECTWGSMETRICS.out.versions.first()) - ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.metrics) + ch_coverage_metrics = ch_coverage_metrics.mix(PICARD_COLLECTWGSMETRICS.out.metrics) } emit: From 7e391c3a3bfc84b71e79077ba2569276e6fc5d2a Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 14:26:06 +0200 Subject: [PATCH 235/283] Update subworkflows/nf-core/bam_qc_picard/main.nf Co-authored-by: Maxime U. Garcia --- subworkflows/nf-core/bam_qc_picard/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/subworkflows/nf-core/bam_qc_picard/main.nf b/subworkflows/nf-core/bam_qc_picard/main.nf index 76709ea6..e38697c3 100644 --- a/subworkflows/nf-core/bam_qc_picard/main.nf +++ b/subworkflows/nf-core/bam_qc_picard/main.nf @@ -25,7 +25,7 @@ workflow BAM_QC_PICARD { if (!ch_bait_interval) log.error("Bait interval channel is empty") if (!ch_target_interval) log.error("Target interval channel is empty") PICARD_COLLECTHSMETRICS( ch_bam, ch_fasta, ch_fasta_fai, ch_bait_interval, ch_target_interval ) - ch_coverage_metrics = PICARD_COLLECTHSMETRICS.out.metrics + ch_coverage_metrics = ch_coverage_metrics.mix(PICARD_COLLECTHSMETRICS.out.metrics) ch_versions = ch_versions.mix(PICARD_COLLECTHSMETRICS.out.versions.first()) } else { PICARD_COLLECTWGSMETRICS( ch_bam, ch_fasta ) From 9d0f03d96a6855af87f19f27728757e839e51842 Mon Sep 17 00:00:00 2001 From: Matthias De Smet <11850640+matthdsm@users.noreply.github.com> Date: Fri, 29 Apr 2022 15:07:09 +0200 Subject: [PATCH 236/283] Update modules/elprep/merge/main.nf Co-authored-by: James A. 
Fellows Yates --- modules/elprep/merge/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/elprep/merge/main.nf b/modules/elprep/merge/main.nf index 28fa9985..d5ffc497 100644 --- a/modules/elprep/merge/main.nf +++ b/modules/elprep/merge/main.nf @@ -29,7 +29,7 @@ process ELPREP_MERGE { mv ${bam} input/ elprep merge \\ - input \\ + input/ \\ output/${prefix}.${suffix} \\ $args \\ ${single_end} \\ From cb29ae2187a2bc624a948eee589ad11d168a069e Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 28 Apr 2022 21:14:52 -0500 Subject: [PATCH 237/283] chore: Use forms for bug report --- .github/ISSUE_TEMPLATE/bug_report.md | 64 --------------------------- .github/ISSUE_TEMPLATE/bug_report.yml | 50 +++++++++++++++++++++ 2 files changed, 50 insertions(+), 64 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/bug_report.md create mode 100644 .github/ISSUE_TEMPLATE/bug_report.yml diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index f1122ea3..00000000 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,64 +0,0 @@ ---- -name: Bug report -about: Report something that is broken or incorrect -title: "[BUG]" ---- - - - -## Check Documentation - -I have checked the following places for your error: - -- [ ] [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) -- [ ] [nf-core/module documentation](https://github.com/nf-core/modules/blob/master/README.md) - -## Description of the bug - - - -## Steps to reproduce - -Steps to reproduce the behaviour: - -1. Command line: -2. See error: - -## Expected behaviour - - - -## Log files - -Have you provided the following extra information/files: - -- [ ] The command used to run the module -- [ ] The `.nextflow.log` file - -## System - -- Hardware: -- Executor: -- OS: -- Version - -## Nextflow Installation - -- Version: - -## Container engine - -- Engine: -- version: -- Image tag: - -## Additional context - - diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 00000000..d9720fb7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,50 @@ +name: Bug report +description: Report something that is broken or incorrect +labels: bug +body: + - type: markdown + attributes: + value: | + Before you post this issue, please check the documentation: + + - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) + - [nf-core/rnaseq pipeline documentation](https://nf-co.re/rnaseq/usage) + + - type: textarea + id: description + attributes: + label: Description of the bug + description: A clear and concise description of what the bug is. + validations: + required: true + + - type: textarea + id: command_used + attributes: + label: Command used and terminal output + description: Steps to reproduce the behaviour. Please paste the command you used to launch the pipeline and the output from your terminal. + render: console + placeholder: | + $ nextflow run ... + + Some output where something broke + + - type: textarea + id: files + attributes: + label: Relevant files + description: | + Please drag and drop the relevant files here. Create a `.zip` archive if the extension is not allowed. + Your verbose log file `.nextflow.log` is often useful _(this is a hidden file in the directory where you launched the pipeline)_ as well as custom Nextflow configuration files. 
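The custom Nextflow configuration files requested in the field above are usually small process-scoped overrides; a hypothetical example of the kind of snippet worth attaching (the module name and values are illustrative only, with the long-option ext.args style matching the Picard updates elsewhere in this series):

process {
    withName: 'PICARD_MARKDUPLICATES' {
        ext.args = '--ASSUME_SORT_ORDER queryname'
        memory   = 16.GB
    }
}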
+ + - type: textarea + id: system + attributes: + label: System information + description: | + * Nextflow version _(eg. 21.10.3)_ + * Hardware _(eg. HPC, Desktop, Cloud)_ + * Executor _(eg. slurm, local, awsbatch)_ + * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ + * OS _(eg. CentOS Linux, macOS, Linux Mint)_ + * Version of nf-core/rnaseq _(eg. 1.1, 1.5, 1.8.2)_ From 2da712f6f3d8f9b246e5fdcb1d786d5fbd6ef7d1 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 28 Apr 2022 21:29:29 -0500 Subject: [PATCH 238/283] chore: Use forms for new modules --- .github/ISSUE_TEMPLATE/new_module.md | 26 ------------------- .github/ISSUE_TEMPLATE/new_module.yml | 36 +++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 26 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/new_module.md create mode 100644 .github/ISSUE_TEMPLATE/new_module.yml diff --git a/.github/ISSUE_TEMPLATE/new_module.md b/.github/ISSUE_TEMPLATE/new_module.md deleted file mode 100644 index 5c7e61cc..00000000 --- a/.github/ISSUE_TEMPLATE/new_module.md +++ /dev/null @@ -1,26 +0,0 @@ ---- -name: New module -about: Suggest a new module for nf-core/modules -title: "new module: TOOL/SUBTOOL" -label: new module ---- - - - -I think it would be good to have a module for [TOOL](https://bioconda.github.io/recipes/TOOL/README.html) - -- [ ] This module does not exist yet with the [`nf-core modules list`](https://github.com/nf-core/tools#list-modules) command -- [ ] There is no [open pull request](https://github.com/nf-core/modules/pulls) for this module -- [ ] There is no [open issue](https://github.com/nf-core/modules/issues) for this module -- [ ] If I'm planning to work on this module, I added myself to the `Assignees` to facilitate tracking who is working on the module diff --git a/.github/ISSUE_TEMPLATE/new_module.yml b/.github/ISSUE_TEMPLATE/new_module.yml new file mode 100644 index 00000000..2d3e9d47 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/new_module.yml @@ -0,0 +1,36 @@ +name: New module +description: Suggest a new module for nf-core/modules +title: "new module: TOOL/SUBTOOL" +labels: new module +body: + - type: checkboxes + attributes: + label: Is there an existing module for this? + description: This module does not exist yet with the [`nf-core modules list`](https://github.com/nf-core/tools#list-modules) command + options: + - label: I have searched for the existing module + required: true + + - type: checkboxes + attributes: + label: Is there an open PR for this? + description: There is no [open pull request](https://github.com/nf-core/modules/pulls) for this module + options: + - label: I have searched for existing PRs + required: true + + - type: checkboxes + attributes: + label: Is there an open issue for this? + description: There is no [open issue](https://github.com/nf-core/modules/issues) for this module + options: + - label: I have searched for existing issues + required: true + + - type: checkboxes + attributes: + label: Are you going to work on this? 
+ description: If I'm planning to work on this module, I added myself to the `Assignees` to facilitate tracking who is working on the module + options: + - label: If I'm planning to work on this module, I added myself to the `Assignees` to facilitate tracking who is working on the module + required: false From 816803dbe98bc5395e87ab51862b57d5b84b2824 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 28 Apr 2022 21:36:49 -0500 Subject: [PATCH 239/283] chore: Add checkboxes for bug report --- .github/ISSUE_TEMPLATE/bug_report.yml | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index d9720fb7..22dc47d4 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -2,13 +2,15 @@ name: Bug report description: Report something that is broken or incorrect labels: bug body: - - type: markdown + - type: checkboxes attributes: - value: | - Before you post this issue, please check the documentation: - - - [nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting) - - [nf-core/rnaseq pipeline documentation](https://nf-co.re/rnaseq/usage) + label: Have you checked the docs? + description: I have checked the following places for my error + options: + - label: "[nf-core website: troubleshooting](https://nf-co.re/usage/troubleshooting)" + required: true + - label: "[nf-core modules documentation](https://nf-co.re/docs/contributing/modules)" + required: true - type: textarea id: description From ef0483586a1a2fdeff3f90584b558f14302a2973 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Thu, 28 Apr 2022 21:46:36 -0500 Subject: [PATCH 240/283] chore: Use forms for feature request --- .github/ISSUE_TEMPLATE/feature_request.md | 32 ---------------------- .github/ISSUE_TEMPLATE/feature_request.yml | 32 ++++++++++++++++++++++ 2 files changed, 32 insertions(+), 32 deletions(-) delete mode 100644 .github/ISSUE_TEMPLATE/feature_request.md create mode 100644 .github/ISSUE_TEMPLATE/feature_request.yml diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index 72d6c058..00000000 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,32 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for nf-core/modules -title: "[FEATURE]" ---- - - - -## Is your feature request related to a problem? Please describe - - - - - -## Describe the solution you'd like - - - -## Describe alternatives you've considered - - - -## Additional context - - diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 00000000..316fba90 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,32 @@ +name: Feature request +description: Suggest an idea for nf-core/modules +labels: feature +title: "[FEATURE]" +body: + - type: textarea + id: description + attributes: + label: Is your feature request related to a problem? Please describe + description: A clear and concise description of what the bug is. + placeholder: | + + validations: + required: true + + - type: textarea + id: solution + attributes: + label: Describe the solution you'd like + description: A clear and concise description of the solution you want to happen. 
+ + - type: textarea + id: alternatives + attributes: + label: Describe alternatives you've considered + description: A clear and concise description of any alternative solutions or features you've considered. + + - type: textarea + id: additional_context + attributes: + label: Additional context + description: Add any other context about the feature request here. From 8bee7d47489ae3a6652e7925a841f32b39f15609 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 29 Apr 2022 09:28:08 -0500 Subject: [PATCH 241/283] chore: Add image tag to bug report Co-authored-by: FriederikeHanssen --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 22dc47d4..349dd44d 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -49,4 +49,4 @@ body: * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ * OS _(eg. CentOS Linux, macOS, Linux Mint)_ - * Version of nf-core/rnaseq _(eg. 1.1, 1.5, 1.8.2)_ + * Image tag: From fb02d6e85f95e5564ca446fe448e94db5a78d470 Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 29 Apr 2022 09:30:03 -0500 Subject: [PATCH 242/283] chore: add OS version to bug report Co-authored-by: FriederikeHanssen --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 349dd44d..71afc6b0 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -48,5 +48,5 @@ body: * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ - * OS _(eg. CentOS Linux, macOS, Linux Mint)_ + * OS and version: _(eg. CentOS Linux, macOS, Ubuntu 22.04)_ * Image tag: From a8a4d76a65af792ca5ab254e9eb0cb521a70047b Mon Sep 17 00:00:00 2001 From: Edmund Miller Date: Fri, 29 Apr 2022 09:30:45 -0500 Subject: [PATCH 243/283] chore: Add container version Co-authored-by: FriederikeHanssen --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 71afc6b0..74907cfe 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -47,6 +47,6 @@ body: * Nextflow version _(eg. 21.10.3)_ * Hardware _(eg. HPC, Desktop, Cloud)_ * Executor _(eg. slurm, local, awsbatch)_ - * Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_ + * Container engine and version: _(e.g. Docker 1.0.0, Singularity, Conda, Podman, Shifter or Charliecloud)_ * OS and version: _(eg. 
CentOS Linux, macOS, Ubuntu 22.04)_ * Image tag: From 39530b5ca710e028fdec94a704e5eed3f78c5800 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Fri, 29 Apr 2022 21:05:12 +0200 Subject: [PATCH 244/283] Bump DIAMOND version to 2.0.15 --- modules/diamond/blastp/main.nf | 8 +++----- modules/diamond/blastx/main.nf | 8 +++----- modules/diamond/makedb/main.nf | 8 +++----- tests/config/test_data.config | 1 + tests/modules/diamond/blastp/main.nf | 8 ++++---- tests/modules/diamond/blastp/test.yml | 8 ++++---- tests/modules/diamond/blastx/main.nf | 4 ++-- tests/modules/diamond/blastx/test.yml | 3 ++- tests/modules/diamond/makedb/main.nf | 2 +- tests/modules/diamond/makedb/test.yml | 9 +++++---- 10 files changed, 28 insertions(+), 31 deletions(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index d7c53d6f..ccd455f4 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -2,12 +2,10 @@ process DIAMOND_BLASTP { tag "$meta.id" label 'process_medium' - // Dimaond is limited to v2.0.9 because there is not a - // singularity version higher than this at the current time. - conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) + conda (params.enable_conda ? "bioconda::diamond=2.0.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : - 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" + 'https://depot.galaxyproject.org/singularity/diamond:2.0.15--hb97b32f_0' : + 'quay.io/biocontainers/diamond:2.0.15--hb97b32f_0' }" input: tuple val(meta), path(fasta) diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index 6703c1e4..357427eb 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -2,12 +2,10 @@ process DIAMOND_BLASTX { tag "$meta.id" label 'process_medium' - // Dimaond is limited to v2.0.9 because there is not a - // singularity version higher than this at the current time. - conda (params.enable_conda ? "bioconda::diamond=2.0.9" : null) + conda (params.enable_conda ? "bioconda::diamond=2.0.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? - 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : - 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" + 'https://depot.galaxyproject.org/singularity/diamond:2.0.15--hb97b32f_0' : + 'quay.io/biocontainers/diamond:2.0.15--hb97b32f_0' }" input: tuple val(meta), path(fasta) diff --git a/modules/diamond/makedb/main.nf b/modules/diamond/makedb/main.nf index e3d62f00..a76a94e5 100644 --- a/modules/diamond/makedb/main.nf +++ b/modules/diamond/makedb/main.nf @@ -2,12 +2,10 @@ process DIAMOND_MAKEDB { tag "$fasta" label 'process_medium' - // Dimaond is limited to v2.0.9 because there is not a - // singularity version higher than this at the current time. - conda (params.enable_conda ? 'bioconda::diamond=2.0.9' : null) + conda (params.enable_conda ? "bioconda::diamond=2.0.15" : null) container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ? 
- 'https://depot.galaxyproject.org/singularity/diamond:2.0.9--hdcc8f71_0' : - 'quay.io/biocontainers/diamond:2.0.9--hdcc8f71_0' }" + 'https://depot.galaxyproject.org/singularity/diamond:2.0.15--hb97b32f_0' : + 'quay.io/biocontainers/diamond:2.0.15--hb97b32f_0' }" input: path fasta diff --git a/tests/config/test_data.config b/tests/config/test_data.config index 5d5535c4..62e38c4d 100644 --- a/tests/config/test_data.config +++ b/tests/config/test_data.config @@ -14,6 +14,7 @@ params { genome_paf = "${test_data_dir}/genomics/sarscov2/genome/genome.paf" genome_sizes = "${test_data_dir}/genomics/sarscov2/genome/genome.sizes" transcriptome_fasta = "${test_data_dir}/genomics/sarscov2/genome/transcriptome.fasta" + proteome_fasta = "${test_data_dir}/genomics/sarscov2/genome/proteome.fasta" transcriptome_paf = "${test_data_dir}/genomics/sarscov2/genome/transcriptome.paf" test_bed = "${test_data_dir}/genomics/sarscov2/genome/bed/test.bed" diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index 5c1459d8..80ea2ec5 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -7,8 +7,8 @@ include { DIAMOND_BLASTP } from '../../../../modules/diamond/blastp/main.nf' workflow test_diamond_blastp { - db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] outext = 'txt' DIAMOND_MAKEDB ( db ) @@ -17,8 +17,8 @@ workflow test_diamond_blastp { workflow test_diamond_blastp_daa { - db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] - fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] + db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] + fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] outext = 'daa' DIAMOND_MAKEDB ( db ) diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index cb297743..c2b8b6f5 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -1,19 +1,19 @@ - name: diamond blastp test_diamond_blastp command: nextflow run tests/modules/diamond/blastp -entry test_diamond_blastp -c tests/config/nextflow.config tags: - - diamond - diamond/blastp + - diamond files: - path: output/diamond/test.diamond_blastp.txt - md5sum: 3ca7f6290c1d8741c573370e6f8b4db0 + md5sum: 2515cf88590afa32356497e79a51fce9 - path: output/diamond/versions.yml - name: diamond blastp test_diamond_blastp_daa command: nextflow run tests/modules/diamond/blastp -entry test_diamond_blastp_daa -c tests/config/nextflow.config tags: - - diamond - diamond/blastp + - diamond files: - path: output/diamond/test.diamond_blastp.daa - md5sum: d4a79ad1fcb2ec69460e5a09a9468db7 + md5sum: 0b539c68a5b66dd6e20ad5d218f4f4c6 - path: output/diamond/versions.yml diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index d6d5a77a..d5949762 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -7,7 +7,7 @@ include { DIAMOND_BLASTX } from '../../../../modules/diamond/blastx/main.nf' workflow test_diamond_blastx { - db = [ 
file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] outext = 'txt' @@ -17,7 +17,7 @@ workflow test_diamond_blastx { workflow test_diamond_blastx_daa { - db = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] outext = 'daa' diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index a2d35c80..9c30ba25 100644 --- a/tests/modules/diamond/blastx/test.yml +++ b/tests/modules/diamond/blastx/test.yml @@ -5,6 +5,7 @@ - diamond/blastx files: - path: output/diamond/test.diamond_blastx.txt + md5sum: eb2aebfa1cb42fcb2121c65528663307 - path: output/diamond/versions.yml - name: diamond blastx test_diamond_blastx_daa @@ -14,5 +15,5 @@ - diamond/blastx files: - path: output/diamond/test.diamond_blastx.daa - md5sum: 2a0ce0f7e01dcead828b87d5cbaccf7a + md5sum: 0df4a833408416f32981415873facc11 - path: output/diamond/versions.yml diff --git a/tests/modules/diamond/makedb/main.nf b/tests/modules/diamond/makedb/main.nf index 70982ae9..d309de6d 100644 --- a/tests/modules/diamond/makedb/main.nf +++ b/tests/modules/diamond/makedb/main.nf @@ -6,7 +6,7 @@ include { DIAMOND_MAKEDB } from '../../../../modules/diamond/makedb/main.nf' workflow test_diamond_makedb { - input = [ file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) ] + input = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] DIAMOND_MAKEDB ( input ) } diff --git a/tests/modules/diamond/makedb/test.yml b/tests/modules/diamond/makedb/test.yml index c8f2d79e..cdddf735 100644 --- a/tests/modules/diamond/makedb/test.yml +++ b/tests/modules/diamond/makedb/test.yml @@ -1,8 +1,9 @@ - name: diamond makedb test_diamond_makedb - command: nextflow run ./tests/modules/diamond/makedb -entry test_diamond_makedb -c ./tests/config/nextflow.config -c ./tests/modules/diamond/makedb/nextflow.config + command: nextflow run tests/modules/diamond/makedb -entry test_diamond_makedb -c tests/config/nextflow.config tags: - - diamond - diamond/makedb + - diamond files: - - path: output/diamond/genome.fasta.dmnd - md5sum: 2447fb376394c20d43ea3aad2aa5d15d + - path: output/diamond/proteome.fasta.dmnd + md5sum: fc28c50b202dd7a7c5451cddff2ba1f4 + - path: output/diamond/versions.yml From 996385fb0f2018d4bb79bc82aea329a58a1e6fe3 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Mon, 2 May 2022 09:36:34 +0200 Subject: [PATCH 245/283] Remove MD5sumfor DIAMOND DAA file due to occasioanlly variability --- tests/modules/diamond/blastp/test.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index c2b8b6f5..4fad0cbf 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -15,5 +15,4 @@ - diamond files: - path: output/diamond/test.diamond_blastp.daa - md5sum: 0b539c68a5b66dd6e20ad5d218f4f4c6 - path: output/diamond/versions.yml From 6b64f9cb6c3dd3577931cc3cd032d6fb730000ce Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 10:34:14 +0200 Subject: [PATCH 246/283] feat added stub to modules --- 
modules/gatk4/mergebamalignment/main.nf | 11 +++++++++++ modules/gatk4/mutect2/main.nf | 14 ++++++++++++++ modules/gatk4/revertsam/main.nf | 11 +++++++++++ modules/gatk4/samtofastq/main.nf | 13 +++++++++++++ modules/samtools/view/main.nf | 12 ++++++++++++ 5 files changed, 61 insertions(+) diff --git a/modules/gatk4/mergebamalignment/main.nf b/modules/gatk4/mergebamalignment/main.nf index 7ba9ccda..5c36b4ba 100644 --- a/modules/gatk4/mergebamalignment/main.nf +++ b/modules/gatk4/mergebamalignment/main.nf @@ -43,4 +43,15 @@ process GATK4_MERGEBAMALIGNMENT { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index 4a1f5768..a214b57d 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -57,4 +57,18 @@ process GATK4_MUTECT2 { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.vcf.gz + touch ${prefix}.tbi + touch ${prefix}.stats + touch ${prefix}.f1r2.tar.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 4e8e9ddc..3084658d 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -39,4 +39,15 @@ process GATK4_REVERTSAM { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.reverted.bam + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/gatk4/samtofastq/main.nf b/modules/gatk4/samtofastq/main.nf index 8553e419..d8d94d69 100644 --- a/modules/gatk4/samtofastq/main.nf +++ b/modules/gatk4/samtofastq/main.nf @@ -40,4 +40,17 @@ process GATK4_SAMTOFASTQ { gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.fastq.gz + touch ${prefix}_1.fastq.gz + touch ${prefix}_2.fastq.gz + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + gatk4: \$(echo \$(gatk --version 2>&1) | sed 's/^.*(GATK) v//; s/ .*\$//') + END_VERSIONS + """ } diff --git a/modules/samtools/view/main.nf b/modules/samtools/view/main.nf index 11cfb74b..55194e88 100644 --- a/modules/samtools/view/main.nf +++ b/modules/samtools/view/main.nf @@ -41,4 +41,16 @@ process SAMTOOLS_VIEW { samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') END_VERSIONS """ + + stub: + def prefix = task.ext.prefix ?: "${meta.id}" + """ + touch ${prefix}.bam + touch ${prefix}.cram + + cat <<-END_VERSIONS > versions.yml + "${task.process}": + samtools: \$(echo \$(samtools --version 2>&1) | sed 's/^.*samtools //; s/Using.*\$//') + END_VERSIONS + """ } From 8a5bcbc325d20f081c9a722bc47f380df43448b7 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 11:06:08 +0200 Subject: [PATCH 247/283] fix the stub of gatk4_revertsam --- 
modules/gatk4/revertsam/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 3084658d..162aa0fa 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -43,7 +43,7 @@ process GATK4_REVERTSAM { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch ${prefix}.reverted.bam + touch ${prefix}.bam cat <<-END_VERSIONS > versions.yml "${task.process}": From 0511e7fbbfa4ba41940d33b687b1cc90227b4eb8 Mon Sep 17 00:00:00 2001 From: Praveen Date: Mon, 2 May 2022 11:35:15 +0200 Subject: [PATCH 248/283] Changed BAI as optional output to handle large genome --- modules/gatk4/markduplicates/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gatk4/markduplicates/main.nf b/modules/gatk4/markduplicates/main.nf index e8a98156..2650925b 100644 --- a/modules/gatk4/markduplicates/main.nf +++ b/modules/gatk4/markduplicates/main.nf @@ -12,7 +12,7 @@ process GATK4_MARKDUPLICATES { output: tuple val(meta), path("*.bam") , emit: bam - tuple val(meta), path("*.bai") , emit: bai + tuple val(meta), path("*.bai") , optional:true, emit: bai tuple val(meta), path("*.metrics"), emit: metrics path "versions.yml" , emit: versions From 6986975bc0425e0b0e6c93dce648167f91f7ebc9 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Mon, 2 May 2022 11:39:07 +0200 Subject: [PATCH 249/283] Fix output channels allow BLAST table customisation --- modules/diamond/blastp/main.nf | 14 +++++++++--- modules/diamond/blastp/meta.yml | 34 ++++++++++++++++++++++++++-- modules/diamond/blastx/main.nf | 14 +++++++++--- modules/diamond/blastx/meta.yml | 28 +++++++++++++++++++++-- tests/modules/diamond/blastp/main.nf | 6 +++-- tests/modules/diamond/blastx/main.nf | 6 +++-- 6 files changed, 88 insertions(+), 14 deletions(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index ccd455f4..6b750145 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -11,10 +11,17 @@ process DIAMOND_BLASTP { tuple val(meta), path(fasta) path db val outext + val blast_columns output: - tuple val(meta), path('*.{blast,xml,txt,daa,sam,tsv,paf}'), emit: output - path "versions.yml" , emit: versions + tuple val(meta), path('*.{blast}'), optional: true, emit: blast + tuple val(meta), path('*.{xml}') , optional: true, emit: xml + tuple val(meta), path('*.{txt}') , optional: true, emit: txt + tuple val(meta), path('*.{daa}') , optional: true, emit: daa + tuple val(meta), path('*.{sam}') , optional: true, emit: sam + tuple val(meta), path('*.{tsv}') , optional: true, emit: tsv + tuple val(meta), path('*.{paf}') , optional: true, emit: paf + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -22,6 +29,7 @@ process DIAMOND_BLASTP { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def columns = blast_columns ? "${blast_columns}" : '' switch ( outext ) { case "blast": outfmt = 0; break case "xml": outfmt = 5; break @@ -39,7 +47,7 @@ process DIAMOND_BLASTP { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - --outfmt ${outfmt} \\ + --outfmt ${outfmt} ${columns} \\ $args \\ --out ${prefix}.${outext} diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml index 1aa36c23..0bc12889 100644 --- a/modules/diamond/blastp/meta.yml +++ b/modules/diamond/blastp/meta.yml @@ -36,12 +36,42 @@ input: `txt` corresponds to to BLAST tabular format. 
`tsv` corresponds to taxonomic classification format. pattern: "blast|xml|txt|daa|sam|tsv|paf" + - blast_columns: + type: string + description: | + Optional space separated list of DIAMOND tabular BLAST output keywords + used for in conjunction with the 'txt' outext option (--outfmt 6). See + DIAMOND documnetation for more information. output: - - txt: + - blast: type: file description: File containing blastp hits - pattern: "*.{blastp.txt}" + pattern: "*.{blast}" + - xml: + type: file + description: File containing blastp hits + pattern: "*.{xml}" + - txt: + type: file + description: File containing hits in tabular BLAST format. + pattern: "*.{txt}" + - daa: + type: file + description: File containing hits DAA format + pattern: "*.{daa}" + - sam: + type: file + description: File containing aligned reads in SAM format + pattern: "*.{sam}" + - tsv: + type: file + description: Tab separated file containing taxonomic classification of hits + pattern: "*.{tsv}" + - paf: + type: file + description: File containing aligned reads in pairwise mapping format format + pattern: "*.{paf}" - versions: type: file description: File containing software versions diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index 357427eb..bb630e32 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -11,10 +11,17 @@ process DIAMOND_BLASTX { tuple val(meta), path(fasta) path db val outext + val blast_columns output: - tuple val(meta), path('*.{blast,xml,txt,daa,sam,tsv,paf}'), emit: output - path "versions.yml" , emit: versions + tuple val(meta), path('*.{blast}'), optional: true, emit: blast + tuple val(meta), path('*.{xml}') , optional: true, emit: xml + tuple val(meta), path('*.{txt}') , optional: true, emit: txt + tuple val(meta), path('*.{daa}') , optional: true, emit: daa + tuple val(meta), path('*.{sam}') , optional: true, emit: sam + tuple val(meta), path('*.{tsv}') , optional: true, emit: tsv + tuple val(meta), path('*.{paf}') , optional: true, emit: paf + path "versions.yml" , emit: versions when: task.ext.when == null || task.ext.when @@ -22,6 +29,7 @@ process DIAMOND_BLASTX { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def columns = blast_columns ? "${blast_columns}" : '' switch ( outext ) { case "blast": outfmt = 0; break case "xml": outfmt = 5; break @@ -39,7 +47,7 @@ process DIAMOND_BLASTX { --threads $task.cpus \\ --db \$DB \\ --query $fasta \\ - --outfmt ${outfmt} \\ + --outfmt ${outfmt} ${columns} \\ $args \\ --out ${prefix}.${outext} diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml index 5ee2d55e..64645d34 100644 --- a/modules/diamond/blastx/meta.yml +++ b/modules/diamond/blastx/meta.yml @@ -38,10 +38,34 @@ input: pattern: "blast|xml|txt|daa|sam|tsv|paf" output: + - blast: + type: file + description: File containing blastp hits + pattern: "*.{blast}" + - xml: + type: file + description: File containing blastp hits + pattern: "*.{xml}" - txt: type: file - description: File containing blastx hits - pattern: "*.{blastx.txt}" + description: File containing hits in tabular BLAST format. 
+ pattern: "*.{txt}" + - daa: + type: file + description: File containing hits DAA format + pattern: "*.{daa}" + - sam: + type: file + description: File containing aligned reads in SAM format + pattern: "*.{sam}" + - tsv: + type: file + description: Tab separated file containing taxonomic classification of hits + pattern: "*.{tsv}" + - paf: + type: file + description: File containing aligned reads in pairwise mapping format format + pattern: "*.{paf}" - versions: type: file description: File containing software versions diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index 80ea2ec5..e90599f0 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -10,9 +10,10 @@ workflow test_diamond_blastp { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] outext = 'txt' + blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) } workflow test_diamond_blastp_daa { @@ -20,7 +21,8 @@ workflow test_diamond_blastp_daa { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] outext = 'daa' + blast_columns = [] DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) } diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index d5949762..8f244528 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -10,9 +10,10 @@ workflow test_diamond_blastx { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] outext = 'txt' + blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) } workflow test_diamond_blastx_daa { @@ -20,7 +21,8 @@ workflow test_diamond_blastx_daa { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] outext = 'daa' + blast_columns = [] DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) } From 58e5c6aecefbff55b8f29bb542b908da0bd293f7 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Mon, 2 May 2022 11:55:43 +0200 Subject: [PATCH 250/283] Make variables consistent --- modules/diamond/blastp/main.nf | 6 +++--- modules/diamond/blastp/meta.yml | 4 ++-- modules/diamond/blastx/main.nf | 6 +++--- modules/diamond/blastx/meta.yml | 2 +- tests/modules/diamond/blastp/main.nf | 8 ++++---- tests/modules/diamond/blastx/main.nf | 8 ++++---- 6 files changed, 17 insertions(+), 17 deletions(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 6b750145..0d78e230 100644 --- a/modules/diamond/blastp/main.nf 
+++ b/modules/diamond/blastp/main.nf @@ -10,7 +10,7 @@ process DIAMOND_BLASTP { input: tuple val(meta), path(fasta) path db - val outext + val out_ext val blast_columns output: @@ -30,7 +30,7 @@ process DIAMOND_BLASTP { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def columns = blast_columns ? "${blast_columns}" : '' - switch ( outext ) { + switch ( out_ext ) { case "blast": outfmt = 0; break case "xml": outfmt = 5; break case "txt": outfmt = 6; break @@ -49,7 +49,7 @@ process DIAMOND_BLASTP { --query $fasta \\ --outfmt ${outfmt} ${columns} \\ $args \\ - --out ${prefix}.${outext} + --out ${prefix}.${out_ext} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/diamond/blastp/meta.yml b/modules/diamond/blastp/meta.yml index 0bc12889..3aa81e53 100644 --- a/modules/diamond/blastp/meta.yml +++ b/modules/diamond/blastp/meta.yml @@ -28,7 +28,7 @@ input: type: directory description: Directory containing the protein blast database pattern: "*" - - outext: + - out_ext: type: string description: | Specify the type of output file to be generated. `blast` corresponds to @@ -40,7 +40,7 @@ input: type: string description: | Optional space separated list of DIAMOND tabular BLAST output keywords - used for in conjunction with the 'txt' outext option (--outfmt 6). See + used for in conjunction with the 'txt' out_ext option (--outfmt 6). See DIAMOND documnetation for more information. output: diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index bb630e32..ef641435 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -10,7 +10,7 @@ process DIAMOND_BLASTX { input: tuple val(meta), path(fasta) path db - val outext + val out_ext val blast_columns output: @@ -30,7 +30,7 @@ process DIAMOND_BLASTX { def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" def columns = blast_columns ? "${blast_columns}" : '' - switch ( outext ) { + switch ( out_ext ) { case "blast": outfmt = 0; break case "xml": outfmt = 5; break case "txt": outfmt = 6; break @@ -49,7 +49,7 @@ process DIAMOND_BLASTX { --query $fasta \\ --outfmt ${outfmt} ${columns} \\ $args \\ - --out ${prefix}.${outext} + --out ${prefix}.${out_ext} cat <<-END_VERSIONS > versions.yml "${task.process}": diff --git a/modules/diamond/blastx/meta.yml b/modules/diamond/blastx/meta.yml index 64645d34..2dcd7bc6 100644 --- a/modules/diamond/blastx/meta.yml +++ b/modules/diamond/blastx/meta.yml @@ -28,7 +28,7 @@ input: type: directory description: Directory containing the nucelotide blast database pattern: "*" - - outext: + - out_ext: type: string description: | Specify the type of output file to be generated. 
`blast` corresponds to diff --git a/tests/modules/diamond/blastp/main.nf b/tests/modules/diamond/blastp/main.nf index e90599f0..ff669233 100644 --- a/tests/modules/diamond/blastp/main.nf +++ b/tests/modules/diamond/blastp/main.nf @@ -9,20 +9,20 @@ workflow test_diamond_blastp { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] - outext = 'txt' + out_ext = 'txt' blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, out_ext, blast_columns ) } workflow test_diamond_blastp_daa { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] - outext = 'daa' + out_ext = 'daa' blast_columns = [] DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) + DIAMOND_BLASTP ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, out_ext, blast_columns ) } diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index 8f244528..bb1b55a8 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -9,20 +9,20 @@ workflow test_diamond_blastx { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] - outext = 'txt' + out_ext = 'txt' blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, out_ext, blast_columns ) } workflow test_diamond_blastx_daa { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] - outext = 'daa' + out_ext = 'daa' blast_columns = [] DIAMOND_MAKEDB ( db ) - DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, outext, blast_columns ) + DIAMOND_BLASTX ( [ [id:'test'], fasta ], DIAMOND_MAKEDB.out.db, out_ext, blast_columns ) } From 08d5acbeb6c9e1b113ef3a0f4d32a6a1dbe522fe Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 11:59:08 +0200 Subject: [PATCH 251/283] feat added tests for stubs --- tests/modules/gatk4/mergebamalignment/main.nf | 11 +++++++++++ tests/modules/gatk4/mergebamalignment/test.yml | 9 +++++++++ tests/modules/gatk4/mutect2/main.nf | 18 ++++++++++++++++++ tests/modules/gatk4/mutect2/test.yml | 11 +++++++++++ tests/modules/gatk4/revertsam/main.nf | 8 ++++++++ tests/modules/gatk4/revertsam/test.yml | 9 +++++++++ tests/modules/gatk4/samtofastq/main.nf | 8 ++++++++ tests/modules/gatk4/samtofastq/test.yml | 10 ++++++++++ tests/modules/samtools/view/main.nf | 9 +++++++++ tests/modules/samtools/view/test.yml | 8 ++++++++ 10 files changed, 101 insertions(+) diff --git a/tests/modules/gatk4/mergebamalignment/main.nf b/tests/modules/gatk4/mergebamalignment/main.nf index 8a38c129..ebedad9b 100644 --- a/tests/modules/gatk4/mergebamalignment/main.nf +++ b/tests/modules/gatk4/mergebamalignment/main.nf @@ -14,3 +14,14 @@ workflow test_gatk4_mergebamalignment { GATK4_MERGEBAMALIGNMENT ( input, fasta, dict ) } + +workflow 
test_gatk4_mergebamalignment_stubs { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true), + file(params.test_data['sarscov2']['illumina']['test_unaligned_bam'], checkIfExists: true) + ] + fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) + dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + + GATK4_MERGEBAMALIGNMENT ( input, fasta, dict ) +} diff --git a/tests/modules/gatk4/mergebamalignment/test.yml b/tests/modules/gatk4/mergebamalignment/test.yml index b1bb32b2..84a67654 100644 --- a/tests/modules/gatk4/mergebamalignment/test.yml +++ b/tests/modules/gatk4/mergebamalignment/test.yml @@ -7,3 +7,12 @@ - path: output/gatk4/test.bam md5sum: e6f1b343700b7ccb94e81ae127433988 - path: output/gatk4/versions.yml + +- name: gatk4 mergebamalignment test_gatk4_mergebamalignment_stubs + command: nextflow run ./tests/modules/gatk4/mergebamalignment -entry test_gatk4_mergebamalignment -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mergebamalignment/nextflow.config -stub-run + tags: + - gatk4 + - gatk4/mergebamalignment + files: + - path: output/gatk4/test.bam + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 0b4339f0..619d28b8 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -118,3 +118,21 @@ workflow test_gatk4_mutect2_mitochondria { GATK4_MUTECT2_MITO ( input, fasta, fai, dict, [], [], [], [] ) } + +workflow test_gatk4_mutect2_tumor_single_stubs { + input = [ [ id:'test'], // meta map + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], + [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [] + ] + + fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) + fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) + dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) + germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz'], checkIfExists: true) + germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz_tbi'], checkIfExists: true) + panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) + panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) + + GATK4_MUTECT2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) +} diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 3cefce09..9232cedd 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -69,3 +69,14 @@ md5sum: fc6ea14ca2da346babe78161beea28c9 - path: output/gatk4/test.vcf.gz.tbi - path: output/gatk4/versions.yml + +- name: gatk4 mutect2 test_gatk4_mutect2_tumor_single_stubs + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config -stub-run + tags: + - gatk4 + - gatk4/mutect2 + files: + - path: output/gatk4/test.vcf.gz + - 
path: output/gatk4/test.vcf.gz.stats + - path: output/gatk4/test.vcf.gz.tbi + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/revertsam/main.nf b/tests/modules/gatk4/revertsam/main.nf index ab5dddee..5b14d471 100644 --- a/tests/modules/gatk4/revertsam/main.nf +++ b/tests/modules/gatk4/revertsam/main.nf @@ -11,3 +11,11 @@ workflow test_gatk4_revertsam { GATK4_REVERTSAM ( input ) } + +workflow test_gatk4_revertsam_stubs { + input = [ [ id:'test' ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + ] + + GATK4_REVERTSAM ( input ) +} diff --git a/tests/modules/gatk4/revertsam/test.yml b/tests/modules/gatk4/revertsam/test.yml index 2ebdb685..89e78659 100644 --- a/tests/modules/gatk4/revertsam/test.yml +++ b/tests/modules/gatk4/revertsam/test.yml @@ -7,3 +7,12 @@ - path: output/gatk4/test.reverted.bam md5sum: f783a88deb45c3a2c20ca12cbe1c5652 - path: output/gatk4/versions.yml + +- name: gatk4 revertsam test_gatk4_revertsam_stubs + command: nextflow run ./tests/modules/gatk4/revertsam -entry test_gatk4_revertsam -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/revertsam/nextflow.config -stub-run + tags: + - gatk4 + - gatk4/revertsam + files: + - path: output/gatk4/test.reverted.bam + - path: output/gatk4/versions.yml diff --git a/tests/modules/gatk4/samtofastq/main.nf b/tests/modules/gatk4/samtofastq/main.nf index 26a8ce2d..aad80057 100644 --- a/tests/modules/gatk4/samtofastq/main.nf +++ b/tests/modules/gatk4/samtofastq/main.nf @@ -19,3 +19,11 @@ workflow test_gatk4_samtofastq_paired_end { GATK4_SAMTOFASTQ ( input ) } + +workflow test_gatk4_samtofastq_paired_end_stubs { + input = [ [ id:'test', single_end: false ], // meta map + [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + ] + + GATK4_SAMTOFASTQ ( input ) +} diff --git a/tests/modules/gatk4/samtofastq/test.yml b/tests/modules/gatk4/samtofastq/test.yml index eb25f33b..1288a270 100644 --- a/tests/modules/gatk4/samtofastq/test.yml +++ b/tests/modules/gatk4/samtofastq/test.yml @@ -19,3 +19,13 @@ - path: output/gatk4/test_2.fastq.gz md5sum: 613bf64c023609e1c62ad6ce9e4be8d7 - path: output/gatk4/versions.yml + +- name: gatk4 samtofastq test_gatk4_samtofastq_paired_end_stubs + command: nextflow run ./tests/modules/gatk4/samtofastq -entry test_gatk4_samtofastq_paired_end -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/samtofastq/nextflow.config -stub-run + tags: + - gatk4 + - gatk4/samtofastq + files: + - path: output/gatk4/test_1.fastq.gz + - path: output/gatk4/test_2.fastq.gz + - path: output/gatk4/versions.yml diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index 9c239066..0e3f597e 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -22,3 +22,12 @@ workflow test_samtools_view_cram { SAMTOOLS_VIEW ( input, fasta ) } + +workflow test_samtools_view_stubs { + input = [ [ id:'test', single_end:false ], // meta map + file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + [] + ] + + SAMTOOLS_VIEW ( input, [] ) +} diff --git a/tests/modules/samtools/view/test.yml b/tests/modules/samtools/view/test.yml index 1287d455..2718130e 100644 --- a/tests/modules/samtools/view/test.yml +++ b/tests/modules/samtools/view/test.yml @@ -14,3 +14,11 @@ - samtools files: - path: output/samtools/test.cram + +- name: samtools view test_samtools_view_stubs + command: nextflow run ./tests/modules/samtools/view -entry 
test_samtools_view -c ./tests/config/nextflow.config -c ./tests/modules/samtools/view/nextflow.config -stub-run + tags: + - samtools/view + - samtools + files: + - path: output/samtools/test.bam From 67b074382e418de8b86409af7d4b7663f6912a02 Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Mon, 2 May 2022 12:29:00 +0200 Subject: [PATCH 252/283] Fix tests --- modules/diamond/blastp/main.nf | 14 +++++++------- modules/diamond/blastx/main.nf | 14 +++++++------- tests/modules/diamond/blastp/test.yml | 1 - tests/modules/diamond/blastx/test.yml | 1 - 4 files changed, 14 insertions(+), 16 deletions(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index 0d78e230..fc77ee7b 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -14,13 +14,13 @@ process DIAMOND_BLASTP { val blast_columns output: - tuple val(meta), path('*.{blast}'), optional: true, emit: blast - tuple val(meta), path('*.{xml}') , optional: true, emit: xml - tuple val(meta), path('*.{txt}') , optional: true, emit: txt - tuple val(meta), path('*.{daa}') , optional: true, emit: daa - tuple val(meta), path('*.{sam}') , optional: true, emit: sam - tuple val(meta), path('*.{tsv}') , optional: true, emit: tsv - tuple val(meta), path('*.{paf}') , optional: true, emit: paf + tuple val(meta), path('*.blast'), optional: true, emit: blast + tuple val(meta), path('*.xml') , optional: true, emit: xml + tuple val(meta), path('*.txt') , optional: true, emit: txt + tuple val(meta), path('*.daa') , optional: true, emit: daa + tuple val(meta), path('*.sam') , optional: true, emit: sam + tuple val(meta), path('*.tsv') , optional: true, emit: tsv + tuple val(meta), path('*.paf') , optional: true, emit: paf path "versions.yml" , emit: versions when: diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index ef641435..3479eedb 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -14,13 +14,13 @@ process DIAMOND_BLASTX { val blast_columns output: - tuple val(meta), path('*.{blast}'), optional: true, emit: blast - tuple val(meta), path('*.{xml}') , optional: true, emit: xml - tuple val(meta), path('*.{txt}') , optional: true, emit: txt - tuple val(meta), path('*.{daa}') , optional: true, emit: daa - tuple val(meta), path('*.{sam}') , optional: true, emit: sam - tuple val(meta), path('*.{tsv}') , optional: true, emit: tsv - tuple val(meta), path('*.{paf}') , optional: true, emit: paf + tuple val(meta), path('*.blast'), optional: true, emit: blast + tuple val(meta), path('*.xml') , optional: true, emit: xml + tuple val(meta), path('*.txt') , optional: true, emit: txt + tuple val(meta), path('*.daa') , optional: true, emit: daa + tuple val(meta), path('*.sam') , optional: true, emit: sam + tuple val(meta), path('*.tsv') , optional: true, emit: tsv + tuple val(meta), path('*.paf') , optional: true, emit: paf path "versions.yml" , emit: versions when: diff --git a/tests/modules/diamond/blastp/test.yml b/tests/modules/diamond/blastp/test.yml index 4fad0cbf..aff4e1c5 100644 --- a/tests/modules/diamond/blastp/test.yml +++ b/tests/modules/diamond/blastp/test.yml @@ -5,7 +5,6 @@ - diamond files: - path: output/diamond/test.diamond_blastp.txt - md5sum: 2515cf88590afa32356497e79a51fce9 - path: output/diamond/versions.yml - name: diamond blastp test_diamond_blastp_daa diff --git a/tests/modules/diamond/blastx/test.yml b/tests/modules/diamond/blastx/test.yml index 9c30ba25..b2b6149f 100644 --- a/tests/modules/diamond/blastx/test.yml +++ 
b/tests/modules/diamond/blastx/test.yml @@ -5,7 +5,6 @@ - diamond/blastx files: - path: output/diamond/test.diamond_blastx.txt - md5sum: eb2aebfa1cb42fcb2121c65528663307 - path: output/diamond/versions.yml - name: diamond blastx test_diamond_blastx_daa From bd3bfe0817246082525ab93707976676b1fe208b Mon Sep 17 00:00:00 2001 From: James Fellows Yates Date: Mon, 2 May 2022 12:41:24 +0200 Subject: [PATCH 253/283] Add warn of default being used --- modules/diamond/blastp/main.nf | 5 +++++ modules/diamond/blastx/main.nf | 5 +++++ tests/modules/diamond/blastx/main.nf | 2 +- 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/modules/diamond/blastp/main.nf b/modules/diamond/blastp/main.nf index fc77ee7b..033186ea 100644 --- a/modules/diamond/blastp/main.nf +++ b/modules/diamond/blastp/main.nf @@ -38,6 +38,11 @@ process DIAMOND_BLASTP { case "sam": outfmt = 101; break case "tsv": outfmt = 102; break case "paf": outfmt = 103; break + default: + outfmt = '6'; + out_ext = 'txt'; + log.warn("Unknown output file format provided (${out_ext}): selecting DIAMOND default of tabular BLAST output (txt)"); + break } """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/modules/diamond/blastx/main.nf b/modules/diamond/blastx/main.nf index 3479eedb..d3272279 100644 --- a/modules/diamond/blastx/main.nf +++ b/modules/diamond/blastx/main.nf @@ -38,6 +38,11 @@ process DIAMOND_BLASTX { case "sam": outfmt = 101; break case "tsv": outfmt = 102; break case "paf": outfmt = 103; break + default: + outfmt = '6'; + out_ext = 'txt'; + log.warn("Unknown output file format provided (${out_ext}): selecting DIAMOND default of tabular BLAST output (txt)"); + break } """ DB=`find -L ./ -name "*.dmnd" | sed 's/.dmnd//'` diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index bb1b55a8..847a64b1 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -9,7 +9,7 @@ workflow test_diamond_blastx { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] - out_ext = 'txt' + out_ext = 'tfdfdt' blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) From f231291e7730654158cbd3f10b82c292e27fa273 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 12:58:12 +0200 Subject: [PATCH 254/283] fix gatk4_reversam test --- modules/gatk4/revertsam/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/gatk4/revertsam/main.nf b/modules/gatk4/revertsam/main.nf index 162aa0fa..3084658d 100644 --- a/modules/gatk4/revertsam/main.nf +++ b/modules/gatk4/revertsam/main.nf @@ -43,7 +43,7 @@ process GATK4_REVERTSAM { stub: def prefix = task.ext.prefix ?: "${meta.id}" """ - touch ${prefix}.bam + touch ${prefix}.reverted.bam cat <<-END_VERSIONS > versions.yml "${task.process}": From de40c1bf54b79efb4aa2396ded7f8f457cf33c02 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 13:15:01 +0200 Subject: [PATCH 255/283] chore removing file from test --- tests/modules/gatk4/mergebamalignment/main.nf | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/modules/gatk4/mergebamalignment/main.nf b/tests/modules/gatk4/mergebamalignment/main.nf index ebedad9b..0eb6876d 100644 --- a/tests/modules/gatk4/mergebamalignment/main.nf +++ b/tests/modules/gatk4/mergebamalignment/main.nf @@ -16,12 +16,12 @@ workflow test_gatk4_mergebamalignment { } workflow 
test_gatk4_mergebamalignment_stubs { - input = [ [ id:'test' ], // meta map - file(params.test_data['sarscov2']['illumina']['test_single_end_bam'], checkIfExists: true), - file(params.test_data['sarscov2']['illumina']['test_unaligned_bam'], checkIfExists: true) + input = [ [ id:'test' ], // meta map + "test_foo.bam", + "test_bar.bam" ] - fasta = file(params.test_data['sarscov2']['genome']['genome_fasta'], checkIfExists: true) - dict = file(params.test_data['sarscov2']['genome']['genome_dict'], checkIfExists: true) + fasta = "genome.fasta" + dict = "genome.fasta.dict" GATK4_MERGEBAMALIGNMENT ( input, fasta, dict ) } From f1c2f624ebf8fb4368b6d5f26ea5b3cfafadf25c Mon Sep 17 00:00:00 2001 From: "James A. Fellows Yates" Date: Mon, 2 May 2022 13:17:22 +0200 Subject: [PATCH 256/283] Update tests/modules/diamond/blastx/main.nf Co-authored-by: Mahesh Binzer-Panchal --- tests/modules/diamond/blastx/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/diamond/blastx/main.nf b/tests/modules/diamond/blastx/main.nf index 847a64b1..8316aa91 100644 --- a/tests/modules/diamond/blastx/main.nf +++ b/tests/modules/diamond/blastx/main.nf @@ -9,7 +9,7 @@ workflow test_diamond_blastx { db = [ file(params.test_data['sarscov2']['genome']['proteome_fasta'], checkIfExists: true) ] fasta = [ file(params.test_data['sarscov2']['genome']['transcriptome_fasta'], checkIfExists: true) ] - out_ext = 'tfdfdt' + out_ext = 'tfdfdt' // Nonsense file extension to check default case. blast_columns = 'qseqid qlen' DIAMOND_MAKEDB ( db ) From 3a4e415fe21982ccb39807d71c16e7d6ad0a1c1a Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 13:30:18 +0200 Subject: [PATCH 257/283] chores adding dummy files for stubs --- tests/modules/gatk4/mutect2/main.nf | 18 +++++++++--------- tests/modules/gatk4/revertsam/main.nf | 2 +- tests/modules/gatk4/samtofastq/main.nf | 2 +- tests/modules/samtools/view/main.nf | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 619d28b8..251e1987 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -121,18 +121,18 @@ workflow test_gatk4_mutect2_mitochondria { workflow test_gatk4_mutect2_tumor_single_stubs { input = [ [ id:'test'], // meta map - [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam'], checkIfExists: true)], - [ file(params.test_data['homo_sapiens']['illumina']['test2_paired_end_recalibrated_sorted_bam_bai'], checkIfExists: true)], + [ "foo.bam" ], + [ "foo.bam.bai" ], [] ] - fasta = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta'], checkIfExists: true) - fai = file(params.test_data['homo_sapiens']['genome']['genome_21_fasta_fai'], checkIfExists: true) - dict = file(params.test_data['homo_sapiens']['genome']['genome_21_dict'], checkIfExists: true) - germline_resource = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz'], checkIfExists: true) - germline_resource_tbi = file(params.test_data['homo_sapiens']['genome']['gnomad_r2_1_1_21_vcf_gz_tbi'], checkIfExists: true) - panel_of_normals = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz'], checkIfExists: true) - panel_of_normals_tbi = file(params.test_data['homo_sapiens']['genome']['mills_and_1000g_indels_21_vcf_gz_tbi'], checkIfExists: true) + fasta = "genome.fasta" + fai = "genome.fasta.fai" + dict = "genome.fasta.dict" + germline_resource = 
"genome_gnomAD.r2.1.1.vcf.gz" + germline_resource_tbi = "genome_gnomAD.r2.1.1.vcf.gz.tbi" + panel_of_normals = "genome_mills_and_1000G.indels.hg38.vcf.gz" + panel_of_normals_tbi = "genome_mills_and_1000G.indels.hg38.vcf.gz.tbi" GATK4_MUTECT2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } diff --git a/tests/modules/gatk4/revertsam/main.nf b/tests/modules/gatk4/revertsam/main.nf index 5b14d471..738ecd8f 100644 --- a/tests/modules/gatk4/revertsam/main.nf +++ b/tests/modules/gatk4/revertsam/main.nf @@ -14,7 +14,7 @@ workflow test_gatk4_revertsam { workflow test_gatk4_revertsam_stubs { input = [ [ id:'test' ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) + "foo_paired_end.bam" ] GATK4_REVERTSAM ( input ) diff --git a/tests/modules/gatk4/samtofastq/main.nf b/tests/modules/gatk4/samtofastq/main.nf index aad80057..79d04c7c 100644 --- a/tests/modules/gatk4/samtofastq/main.nf +++ b/tests/modules/gatk4/samtofastq/main.nf @@ -22,7 +22,7 @@ workflow test_gatk4_samtofastq_paired_end { workflow test_gatk4_samtofastq_paired_end_stubs { input = [ [ id:'test', single_end: false ], // meta map - [ file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true) ] + [ "foo_paired_end.bam" ] ] GATK4_SAMTOFASTQ ( input ) diff --git a/tests/modules/samtools/view/main.nf b/tests/modules/samtools/view/main.nf index 0e3f597e..bdad1078 100644 --- a/tests/modules/samtools/view/main.nf +++ b/tests/modules/samtools/view/main.nf @@ -25,7 +25,7 @@ workflow test_samtools_view_cram { workflow test_samtools_view_stubs { input = [ [ id:'test', single_end:false ], // meta map - file(params.test_data['sarscov2']['illumina']['test_paired_end_bam'], checkIfExists: true), + "foo_paired_end.bam", [] ] From da82e06354c34bf6381a86e5af195b24d7ef98ee Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 14:16:07 +0200 Subject: [PATCH 258/283] fix gatk4_mutect2 test --- tests/modules/gatk4/mutect2/main.nf | 14 +++++++++----- tests/modules/gatk4/mutect2/test.yml | 5 +++-- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 251e1987..486b78ad 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -119,10 +119,14 @@ workflow test_gatk4_mutect2_mitochondria { GATK4_MUTECT2_MITO ( input, fasta, fai, dict, [], [], [], [] ) } -workflow test_gatk4_mutect2_tumor_single_stubs { - input = [ [ id:'test'], // meta map - [ "foo.bam" ], - [ "foo.bam.bai" ], +workflow test_gatk4_mutect2_tumor_normal_pair_f1r2_stubs { + input = [ [ id:'test', normal_id:'normal', tumor_id:'tumour' ], // meta map + [ "foo_parired.bam", + "foo_parired2.bam" + ], + [ "foo_parired.bam.bai", + "foo_parired2.bam.bai" + ], [] ] @@ -134,5 +138,5 @@ workflow test_gatk4_mutect2_tumor_single_stubs { panel_of_normals = "genome_mills_and_1000G.indels.hg38.vcf.gz" panel_of_normals_tbi = "genome_mills_and_1000G.indels.hg38.vcf.gz.tbi" - GATK4_MUTECT2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) + GATK4_MUTECT2_F1R2 ( input, fasta, fai, dict, germline_resource, germline_resource_tbi, panel_of_normals, panel_of_normals_tbi ) } diff --git a/tests/modules/gatk4/mutect2/test.yml b/tests/modules/gatk4/mutect2/test.yml index 9232cedd..3853801d 100644 --- a/tests/modules/gatk4/mutect2/test.yml +++ b/tests/modules/gatk4/mutect2/test.yml @@ -70,12 
+70,13 @@ - path: output/gatk4/test.vcf.gz.tbi - path: output/gatk4/versions.yml -- name: gatk4 mutect2 test_gatk4_mutect2_tumor_single_stubs - command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_single -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config -stub-run +- name: gatk4 mutect2 test_gatk4_mutect2_tumor_normal_pair_f1r2_stubs + command: nextflow run ./tests/modules/gatk4/mutect2 -entry test_gatk4_mutect2_tumor_normal_pair_f1r2 -c ./tests/config/nextflow.config -c ./tests/modules/gatk4/mutect2/nextflow.config -stub-run tags: - gatk4 - gatk4/mutect2 files: + - path: output/gatk4/test.f1r2.tar.gz - path: output/gatk4/test.vcf.gz - path: output/gatk4/test.vcf.gz.stats - path: output/gatk4/test.vcf.gz.tbi From ed4dd1a928ebf4308efb720de878045f7773f8e2 Mon Sep 17 00:00:00 2001 From: ljmesi <37740329+ljmesi@users.noreply.github.com> Date: Mon, 2 May 2022 14:16:12 +0200 Subject: [PATCH 259/283] Add additional handling of fq.gz file name suffix --- modules/metaphlan3/main.nf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/metaphlan3/main.nf b/modules/metaphlan3/main.nf index 3fc6b277..bff0eb9a 100644 --- a/modules/metaphlan3/main.nf +++ b/modules/metaphlan3/main.nf @@ -23,7 +23,7 @@ process METAPHLAN3 { script: def args = task.ext.args ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def input_type = ("$input".endsWith(".fastq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" + def input_type = ("$input".endsWith(".fastq.gz") || "$input".endsWith(".fq.gz")) ? "--input_type fastq" : ("$input".contains(".fasta")) ? "--input_type fasta" : ("$input".endsWith(".bowtie2out.txt")) ? "--input_type bowtie2out" : "--input_type sam" def input_data = ("$input_type".contains("fastq")) && !meta.single_end ? "${input[0]},${input[1]}" : "$input" def bowtie2_out = "$input_type" == "--input_type bowtie2out" || "$input_type" == "--input_type sam" ? '' : "--bowtie2out ${prefix}.bowtie2out.txt" From 5cd517e7596b8869d27c01505029ac1449067e95 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Mon, 2 May 2022 14:31:40 +0200 Subject: [PATCH 260/283] Apply suggestions from code review Co-authored-by: James A. Fellows Yates --- modules/antismash/antismashlitedownloaddatabases/meta.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/meta.yml b/modules/antismash/antismashlitedownloaddatabases/meta.yml index 4b9644c4..f7ddf3b0 100644 --- a/modules/antismash/antismashlitedownloaddatabases/meta.yml +++ b/modules/antismash/antismashlitedownloaddatabases/meta.yml @@ -27,7 +27,7 @@ input: - database_css: type: directory description: | - antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. These files are normally downloaded by download-antismash-databases itself, and must be retrieved by ther use by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. + antismash/outputs/html/css folder which is being created during the antiSMASH database downloading step. 
These files are normally downloaded by download-antismash-databases itself, and must be retrieved by the user by manually running the command with conda or a standalone installation of antiSMASH. Therefore we do not recommend using this module for production pipelines, but rather require users to specify their own local copy of the antiSMASH database in pipelines. pattern: "css" - database_detection: type: directory From 0fc352188b8060331ee4268ee5492d9a54905b47 Mon Sep 17 00:00:00 2001 From: jasmezz Date: Mon, 2 May 2022 14:40:35 +0200 Subject: [PATCH 261/283] Fix conda antismash path --- modules/antismash/antismashlitedownloaddatabases/main.nf | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index 5f9141f0..da8a750d 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -35,12 +35,19 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { script: def args = task.ext.args ?: '' + conda = params.enable_conda """ download-antismash-databases \\ --database-dir antismash_db \\ $args - cp -r /usr/local/lib/python3.8/site-packages/antismash antismash_dir + if [[ $conda = false ]]; \ + then \ + cp -r /usr/local/lib/python3.8/site-packages/antismash antismash_dir; \ + else \ + antismash_path=\$(python -c 'import antismash;print(antismash.__file__.split("__")[0])') \ + cp -r \$antismash_path antismash_dir; \ + fi cat <<-END_VERSIONS > versions.yml "${task.process}": From 62c6123ec48e15b42bd60b344603a83b658054d8 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 15:11:57 +0200 Subject: [PATCH 262/283] fix gatk4_mutect2 test by changing main.nf --- modules/gatk4/mutect2/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/gatk4/mutect2/main.nf b/modules/gatk4/mutect2/main.nf index a214b57d..9969ad70 100644 --- a/modules/gatk4/mutect2/main.nf +++ b/modules/gatk4/mutect2/main.nf @@ -62,8 +62,8 @@ process GATK4_MUTECT2 { def prefix = task.ext.prefix ?: "${meta.id}" """ touch ${prefix}.vcf.gz - touch ${prefix}.tbi - touch ${prefix}.stats + touch ${prefix}.vcf.gz.tbi + touch ${prefix}.vcf.gz.stats touch ${prefix}.f1r2.tar.gz cat <<-END_VERSIONS > versions.yml From 50cc136a7810323f2c802ff3bcc1bf54c85d57a9 Mon Sep 17 00:00:00 2001 From: Lucpen Date: Mon, 2 May 2022 15:14:05 +0200 Subject: [PATCH 263/283] Update tests/modules/gatk4/mutect2/main.nf fix spelling mistake Co-authored-by: Maxime U. 
Garcia --- tests/modules/gatk4/mutect2/main.nf | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/modules/gatk4/mutect2/main.nf b/tests/modules/gatk4/mutect2/main.nf index 486b78ad..310e9ca1 100644 --- a/tests/modules/gatk4/mutect2/main.nf +++ b/tests/modules/gatk4/mutect2/main.nf @@ -121,11 +121,11 @@ workflow test_gatk4_mutect2_mitochondria { workflow test_gatk4_mutect2_tumor_normal_pair_f1r2_stubs { input = [ [ id:'test', normal_id:'normal', tumor_id:'tumour' ], // meta map - [ "foo_parired.bam", - "foo_parired2.bam" + [ "foo_paired.bam", + "foo_paired2.bam" ], - [ "foo_parired.bam.bai", - "foo_parired2.bam.bai" + [ "foo_paired.bam.bai", + "foo_paired2.bam.bai" ], [] ] From 6a9f2077cf6f224c547e4c7ff0f703cedc69bda2 Mon Sep 17 00:00:00 2001 From: jasmezz Date: Mon, 2 May 2022 16:41:59 +0200 Subject: [PATCH 264/283] Fix unbound variable crash --- modules/antismash/antismashlitedownloaddatabases/main.nf | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index da8a750d..fbb92490 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -37,16 +37,15 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { def args = task.ext.args ?: '' conda = params.enable_conda """ - download-antismash-databases \\ - --database-dir antismash_db \\ - $args + #download-antismash-databases \\ + # --database-dir antismash_db \\ + # $args if [[ $conda = false ]]; \ then \ cp -r /usr/local/lib/python3.8/site-packages/antismash antismash_dir; \ else \ - antismash_path=\$(python -c 'import antismash;print(antismash.__file__.split("__")[0])') \ - cp -r \$antismash_path antismash_dir; \ + cp -r \$(python -c 'import antismash;print(antismash.__file__.split("/__")[0])') antismash_dir; \ fi cat <<-END_VERSIONS > versions.yml From 8db0b754c0422cb80d06f47986d660f8aaf02c89 Mon Sep 17 00:00:00 2001 From: Jasmin F <73216762+jasmezz@users.noreply.github.com> Date: Mon, 2 May 2022 16:56:39 +0200 Subject: [PATCH 265/283] Fix commented-out command --- modules/antismash/antismashlitedownloaddatabases/main.nf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/modules/antismash/antismashlitedownloaddatabases/main.nf b/modules/antismash/antismashlitedownloaddatabases/main.nf index fbb92490..a0928333 100644 --- a/modules/antismash/antismashlitedownloaddatabases/main.nf +++ b/modules/antismash/antismashlitedownloaddatabases/main.nf @@ -37,9 +37,9 @@ process ANTISMASH_ANTISMASHLITEDOWNLOADDATABASES { def args = task.ext.args ?: '' conda = params.enable_conda """ - #download-antismash-databases \\ - # --database-dir antismash_db \\ - # $args + download-antismash-databases \\ + --database-dir antismash_db \\ + $args if [[ $conda = false ]]; \ then \ From 24dc2d21132b07b4c3d36c9836885427a5019b11 Mon Sep 17 00:00:00 2001 From: jvhagey Date: Mon, 2 May 2022 11:30:19 -0400 Subject: [PATCH 266/283] added tests --- modules/srst2/srst2/main.nf | 4 ++-- modules/srst2/srst2/meta.yml | 6 +++++- tests/modules/srst2/srst2/main.nf | 31 +++++++++++++++++++++++++++--- tests/modules/srst2/srst2/test.yml | 21 ++++++++++++++++++-- 4 files changed, 54 insertions(+), 8 deletions(-) diff --git a/modules/srst2/srst2/main.nf b/modules/srst2/srst2/main.nf index 4ee9f6a6..e8a91716 100644 --- a/modules/srst2/srst2/main.nf +++ b/modules/srst2/srst2/main.nf @@ -11,8 +11,9 @@ process SRST2_SRST2 { tuple 
val(meta), path(fastq_s), path(db) output: - tuple val(meta), path("*_genes_*_results.txt") , emit: gene_results + tuple val(meta), path("*_genes_*_results.txt") , optional:true, emit: gene_results tuple val(meta), path("*_fullgenes_*_results.txt") , optional:true, emit: fullgene_results + tuple val(meta), path("*_mlst_*_results.txt") , optional:true, emit: mlst_results tuple val(meta), path("*.pileup") , emit: pileup tuple val(meta), path("*.sorted.bam") , emit: sorted_bam path "versions.yml" , emit: versions @@ -38,7 +39,6 @@ process SRST2_SRST2 { --output ${prefix} \\ ${database} \\ $args - cat <<-END_VERSIONS > versions.yml "${task.process}": srst2: \$(echo \$(srst2 --version 2>&1) | sed 's/srst2 //' )) diff --git a/modules/srst2/srst2/meta.yml b/modules/srst2/srst2/meta.yml index 3755fb34..94c763bb 100644 --- a/modules/srst2/srst2/meta.yml +++ b/modules/srst2/srst2/meta.yml @@ -49,12 +49,16 @@ output: pattern: "versions.yml" - txt: type: file - description: A detailed report, with one row per gene per sample described here: https://github.com/katholt/srst2#gene-typing + description: A detailed report, with one row per gene per sample described here github.com/katholt/srst2#gene-typing pattern: "*_fullgenes_*_results.txt" - txt: type: file description: A tabulated summary report of samples x genes. pattern: "*_genes_*_results.txt" + - txt: + type: file + description: A tabulated summary report of mlst subtyping. + pattern: "*_mlst_*_results.txt" - bam: type: file description: Sorted BAM file diff --git a/tests/modules/srst2/srst2/main.nf b/tests/modules/srst2/srst2/main.nf index 235f3ff9..7d51937b 100644 --- a/tests/modules/srst2/srst2/main.nf +++ b/tests/modules/srst2/srst2/main.nf @@ -4,9 +4,34 @@ nextflow.enable.dsl = 2 include { SRST2_SRST2 } from '../../../../modules/srst2/srst2/main.nf' +workflow test_srst2_srst2_exit { + + input = [ + [ id:'test', single_end:false, db:"test"], // meta map + [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), + file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], +// [("")] + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/resFinder_20180221_srst2.fasta') + ] + + SRST2_SRST2(input) +} + +workflow test_srst2_srst2_mlst { + + input = [ + [ id:'test', single_end:false, db:"mlst"], // meta map + [ file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/SRR9067271_1.fastq.gz", checkIfExists: true), + file("https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/SRR9067271_2.fastq.gz", checkIfExists: true) ], + file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/MLST_DB.fas') + ] + + SRST2_SRST2(input) +} + workflow test_srst2_srst2_paired_end { - input = [ + input = [ [ id:'test', single_end:false, db:"gene"], // meta map [ file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), file(params.test_data['bacteroides_fragilis']['illumina']['test1_2_fastq_gz'], checkIfExists: true) ], @@ -18,11 +43,11 @@ workflow test_srst2_srst2_paired_end { workflow test_srst2_srst2_single_end { - input = [ + input = [ [ id:'test', single_end:true, db:"gene" ], // meta map file(params.test_data['bacteroides_fragilis']['illumina']['test1_1_fastq_gz'], checkIfExists: true), 
file('https://raw.githubusercontent.com/nf-core/test-datasets/modules/data/delete_me/srst2/resFinder_20180221_srst2.fasta') // Change to params.test_data syntax after the data is included in tests/config/test_data.config ] SRST2_SRST2(input) -} \ No newline at end of file +} diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index f011b4d1..102c78a4 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -1,8 +1,22 @@ +- name: srst2 srst2 test_srst2_srst2_mlst + command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_mlst -c tests/config/nextflow.config + tags: + - srst2/srst2 + - srst2 + files: + - path: output/srst2/test__SRR9067271.MLST_DB.pileup + md5sum: f59217dd9340264b9913c20b545b2ce7 + - path: output/srst2/test__SRR9067271.MLST_DB.sorted.bam + - path: output/srst2/test__mlst__MLST_DB__results.txt + md5sum: ec1b1f69933401d67c57f64cad11a098 + - path: output/srst2/versions.yml + md5sum: a0c256a2fd3636069710b8ef22ee5ea7 + - name: srst2 srst2 test_srst2_srst2_paired_end command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_paired_end -c tests/config/nextflow.config tags: - - srst2 - srst2/srst2 + - srst2 files: - path: output/srst2/test__genes__resFinder_20180221_srst2__results.txt md5sum: 099aa6cacec5524b311f606debdfb3a9 @@ -15,8 +29,8 @@ - name: srst2 srst2 test_srst2_srst2_single_end command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_single_end -c tests/config/nextflow.config tags: - - srst2 - srst2/srst2 + - srst2 files: - path: output/srst2/test__fullgenes__resFinder_20180221_srst2__results.txt md5sum: d0762ef8c38afd0e0a34cce52ed1a3db @@ -27,3 +41,6 @@ - path: output/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam - path: output/srst2/versions.yml md5sum: 790fe00493c6634d17801a930073218b + +- name: srst2 srst2 test_srst2_srst2_exit #Testing pipeline exit when not meta.db + exit_code: 1 From cd45dc15503972b3f4bce572fe98168f68e66f9e Mon Sep 17 00:00:00 2001 From: jvhagey Date: Mon, 2 May 2022 11:38:39 -0400 Subject: [PATCH 267/283] linting test.yml --- tests/modules/srst2/srst2/test.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 102c78a4..9e448211 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -1,3 +1,9 @@ +- name: srst2 srst2 test_srst2_srst2_exit #Testing pipeline exit when not meta.db + tags: + - srst2/srst2 + - srst2 + exit_code: 1 + - name: srst2 srst2 test_srst2_srst2_mlst command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_mlst -c tests/config/nextflow.config tags: @@ -41,6 +47,3 @@ - path: output/srst2/test__test1_1.resFinder_20180221_srst2.sorted.bam - path: output/srst2/versions.yml md5sum: 790fe00493c6634d17801a930073218b - -- name: srst2 srst2 test_srst2_srst2_exit #Testing pipeline exit when not meta.db - exit_code: 1 From 99a87119970762fea09f8c71080dabf88d0836f3 Mon Sep 17 00:00:00 2001 From: "Jill V. 
Hagey, PhD" Date: Mon, 2 May 2022 12:26:56 -0400 Subject: [PATCH 268/283] Update tests/modules/srst2/srst2/test.yml Co-authored-by: Edmund Miller --- tests/modules/srst2/srst2/test.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 9e448211..5ed236d3 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -1,4 +1,5 @@ - name: srst2 srst2 test_srst2_srst2_exit #Testing pipeline exit when not meta.db + command: nextflow run tests/modules/srst2/srst2 -entry test_srst2_srst2_exit -c tests/config/nextflow.config tags: - srst2/srst2 - srst2 From e5d7c80bb50dc58c360c3234faa243964d1a2dbb Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Mon, 2 May 2022 18:13:14 -0400 Subject: [PATCH 269/283] Update test.yml --- tests/modules/srst2/srst2/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 5ed236d3..9ea38dd6 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -12,11 +12,11 @@ - srst2 files: - path: output/srst2/test__SRR9067271.MLST_DB.pileup - md5sum: f59217dd9340264b9913c20b545b2ce7 + contains: "dnaJ_1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" - path: output/srst2/test__SRR9067271.MLST_DB.sorted.bam - path: output/srst2/test__mlst__MLST_DB__results.txt md5sum: ec1b1f69933401d67c57f64cad11a098 - - path: output/srst2/versions.yml + - path: /tmp/tmp74070b6r/srst2/versions.yml md5sum: a0c256a2fd3636069710b8ef22ee5ea7 - name: srst2 srst2 test_srst2_srst2_paired_end From 7b0d6f96fd0764254510df99e238be33b0acf862 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Mon, 2 May 2022 18:20:54 -0400 Subject: [PATCH 270/283] Update test.yml --- tests/modules/srst2/srst2/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 9ea38dd6..2b0c0d70 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -12,7 +12,8 @@ - srst2 files: - path: output/srst2/test__SRR9067271.MLST_DB.pileup - contains: "dnaJ_1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" + contains: + - "dnaJ_1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" - path: output/srst2/test__SRR9067271.MLST_DB.sorted.bam - path: output/srst2/test__mlst__MLST_DB__results.txt md5sum: ec1b1f69933401d67c57f64cad11a098 From db9681029c7a75da227577e29f26cb26de78a2e5 Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Mon, 2 May 2022 18:27:01 -0400 Subject: [PATCH 271/283] Update test.yml --- tests/modules/srst2/srst2/test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 2b0c0d70..12b2a4f0 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -13,11 +13,11 @@ files: - path: output/srst2/test__SRR9067271.MLST_DB.pileup contains: - - "dnaJ_1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" + - "dnaJ-1 2 C 17 .........,....... 
FFFFFFFFFFFFFFFFF" - path: output/srst2/test__SRR9067271.MLST_DB.sorted.bam - path: output/srst2/test__mlst__MLST_DB__results.txt md5sum: ec1b1f69933401d67c57f64cad11a098 - - path: /tmp/tmp74070b6r/srst2/versions.yml + - path: output/srst2/versions.yml md5sum: a0c256a2fd3636069710b8ef22ee5ea7 - name: srst2 srst2 test_srst2_srst2_paired_end From 23cfb5dd42dc3dba37ee4588cf1262ce5c59ef6e Mon Sep 17 00:00:00 2001 From: "Jill V. Hagey, PhD" Date: Mon, 2 May 2022 18:38:48 -0400 Subject: [PATCH 272/283] Update test.yml --- tests/modules/srst2/srst2/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/modules/srst2/srst2/test.yml b/tests/modules/srst2/srst2/test.yml index 12b2a4f0..f7621f4f 100644 --- a/tests/modules/srst2/srst2/test.yml +++ b/tests/modules/srst2/srst2/test.yml @@ -13,7 +13,7 @@ files: - path: output/srst2/test__SRR9067271.MLST_DB.pileup contains: - - "dnaJ-1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" + - "dnaJ-1 2 C 17 .........,....... FFFFFFFFFFFFFFFFF" - path: output/srst2/test__SRR9067271.MLST_DB.sorted.bam - path: output/srst2/test__mlst__MLST_DB__results.txt md5sum: ec1b1f69933401d67c57f64cad11a098 From b8a59c2b173cf0d987effc8a3e1cd547307a6085 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 3 May 2022 08:20:10 +0200 Subject: [PATCH 273/283] Fixed version annotation --- modules/vardictjava/main.nf | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 08318c29..682c26be 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -24,6 +24,8 @@ process VARDICTJAVA { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" + def VERSION = '1.8.3' + """ vardict-java \\ $args \\ @@ -41,7 +43,8 @@ process VARDICTJAVA { cat <<-END_VERSIONS > versions.yml "${task.process}": - vardict-java: \$(echo 1.8.3) + vardict-java: \$VERSION + var2vcf_valid.pl: \$(echo \$(var2vcf_valid.pl -h | sed -n 2p | awk '{ print \$2 }')) END_VERSIONS """ } From 9a10e5beb56904061e0b728fea55c8246af74997 Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 3 May 2022 08:29:46 +0200 Subject: [PATCH 274/283] Added the missing inputs in meta.yml + correct notation --- modules/vardictjava/main.nf | 10 +++++----- modules/vardictjava/meta.yml | 16 ++++++++++++++-- tests/modules/vardictjava/nextflow.config | 4 ---- tests/modules/vardictjava/test.yml | 2 +- 4 files changed, 20 insertions(+), 12 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 682c26be..14964989 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -9,8 +9,8 @@ process VARDICTJAVA { input: tuple val(meta), path(bam), path(bai) - path(regions_of_interest) - tuple path(reference_fasta), path(reference_fai) + path(bed) + tuple path(fasta), path(fasta_fai) output: tuple val(meta), path("*.vcf.gz"), emit: vcf @@ -33,8 +33,8 @@ process VARDICTJAVA { -b $bam \\ -th $task.cpus \\ -N $prefix \\ - -G $reference_fasta \\ - $regions_of_interest \\ + -G $fasta \\ + $bed \\ | teststrandbias.R \\ | var2vcf_valid.pl \\ $args2 \\ @@ -43,7 +43,7 @@ process VARDICTJAVA { cat <<-END_VERSIONS > versions.yml "${task.process}": - vardict-java: \$VERSION + vardict-java: $VERSION var2vcf_valid.pl: \$(echo \$(var2vcf_valid.pl -h | sed -n 2p | awk '{ print \$2 }')) END_VERSIONS """ diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index e3b2efe7..42480bc1 100644 --- a/modules/vardictjava/meta.yml +++ 
b/modules/vardictjava/meta.yml @@ -25,17 +25,29 @@ input: type: file description: BAM/SAM file pattern: "*.{bam,sam}" + + - bai: + type: file + description: Index of the BAM file + pattern: "*.bai" - - reference_fasta: + - fasta: type: file description: FASTA of the reference genome pattern: "*.{fa,fasta}" - - regions_of_interest: + - fasta_fai: + type: file + description: The index of the FASTA of the reference genome + pattern: "*.fai" + + - bed: type: file description: BED with the regions of interest pattern: "*.bed" + + output: - meta: type: map diff --git a/tests/modules/vardictjava/nextflow.config b/tests/modules/vardictjava/nextflow.config index e08201cc..50f50a7a 100644 --- a/tests/modules/vardictjava/nextflow.config +++ b/tests/modules/vardictjava/nextflow.config @@ -2,8 +2,4 @@ process { publishDir = { "${params.outdir}/${task.process.tokenize(':')[-1].tokenize('_')[0].toLowerCase()}" } - withName: VARDICTJAVA { - ext.args = '' - ext.args2 = '' - } } \ No newline at end of file diff --git a/tests/modules/vardictjava/test.yml b/tests/modules/vardictjava/test.yml index 549d688e..8cb29c4e 100644 --- a/tests/modules/vardictjava/test.yml +++ b/tests/modules/vardictjava/test.yml @@ -6,4 +6,4 @@ - path: output/vardictjava/test.vcf.gz md5sum: 3f1f227afc532bddeb58f16fd3013fc8 - path: output/vardictjava/versions.yml - md5sum: aac455b8c9c9194c5fed80e4fd495b96 + md5sum: 9b62c431a4f2680412b61c7071bdb1cd From 71c5d213742b56c62a75b6f34b1f734e073f415e Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 3 May 2022 08:34:12 +0200 Subject: [PATCH 275/283] ran prettier --- modules/vardictjava/meta.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 42480bc1..83675fdc 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -25,10 +25,10 @@ input: type: file description: BAM/SAM file pattern: "*.{bam,sam}" - + - bai: type: file - description: Index of the BAM file + description: Index of the BAM file pattern: "*.bai" - fasta: @@ -46,8 +46,6 @@ input: description: BED with the regions of interest pattern: "*.bed" - - output: - meta: type: map From d33253513fcd8900db9480450496886ea1f17adb Mon Sep 17 00:00:00 2001 From: Nicolas Vannieuwkerke Date: Tue, 3 May 2022 10:15:30 +0200 Subject: [PATCH 276/283] put the version on top of the file --- modules/vardictjava/main.nf | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/vardictjava/main.nf b/modules/vardictjava/main.nf index 14964989..454b86a4 100644 --- a/modules/vardictjava/main.nf +++ b/modules/vardictjava/main.nf @@ -1,3 +1,5 @@ +def VERSION = '1.8.3' + process VARDICTJAVA { tag "$meta.id" label 'process_medium' @@ -24,8 +26,6 @@ process VARDICTJAVA { def args2 = task.ext.args2 ?: '' def prefix = task.ext.prefix ?: "${meta.id}" - def VERSION = '1.8.3' - """ vardict-java \\ $args \\ From 7c89af359c19131d2f9e8d32bd6ebb0dd682e4d2 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:15:51 +0200 Subject: [PATCH 277/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 83675fdc..63a52611 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -20,7 +20,6 @@ input: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - bam: type: file description: BAM/SAM file From 2dff193b543d78c1176d19c6f9c42385caa131c2 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:15:59 +0200 Subject: [PATCH 278/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 63a52611..a0af9fcc 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -29,7 +29,6 @@ input: type: file description: Index of the BAM file pattern: "*.bai" - - fasta: type: file description: FASTA of the reference genome From 3cdbdf50fc6423826e4aeed8fef8e1f75a19f621 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:16:14 +0200 Subject: [PATCH 279/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index a0af9fcc..3afa7e62 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -24,7 +24,6 @@ input: type: file description: BAM/SAM file pattern: "*.{bam,sam}" - - bai: type: file description: Index of the BAM file From 1609dfc96dd1ff205f1cac511b8dabb64f8a3db6 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:16:20 +0200 Subject: [PATCH 280/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 3afa7e62..325ce1ca 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -32,7 +32,6 @@ input: type: file description: FASTA of the reference genome pattern: "*.{fa,fasta}" - - fasta_fai: type: file description: The index of the FASTA of the reference genome From 01085b3e090b0e64ed0db23a93e00bd891a11639 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:16:30 +0200 Subject: [PATCH 281/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 325ce1ca..5be27690 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -36,7 +36,6 @@ input: type: file description: The index of the FASTA of the reference genome pattern: "*.fai" - - bed: type: file description: BED with the regions of interest From 11c6faf433d19803a1a9c198c83bde5ce5ae9706 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:16:36 +0200 Subject: [PATCH 282/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 5be27690..196b090e 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -47,7 +47,6 @@ output: description: | Groovy Map containing sample information e.g. 
[ id:'test', single_end:false ] - - versions: type: file description: File containing software versions From 818764128039936e8d94df70427667638b0276a9 Mon Sep 17 00:00:00 2001 From: nvnieuwk <101190534+nvnieuwk@users.noreply.github.com> Date: Tue, 3 May 2022 10:16:45 +0200 Subject: [PATCH 283/283] Update modules/vardictjava/meta.yml Co-authored-by: James A. Fellows Yates --- modules/vardictjava/meta.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/modules/vardictjava/meta.yml b/modules/vardictjava/meta.yml index 196b090e..59fba966 100644 --- a/modules/vardictjava/meta.yml +++ b/modules/vardictjava/meta.yml @@ -51,7 +51,6 @@ output: type: file description: File containing software versions pattern: "versions.yml" - - vcf: type: file description: VCF file output
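Taken together, the vardictjava patches above settle on a module that takes three inputs -- a tuple of meta map, BAM and BAI, a BED file of target regions, and a tuple of reference FASTA and FAI index -- and emits a bgzipped VCF per sample along with a versions.yml. The sketch below shows one way the finished module might be wired into a small DSL2 test workflow. It is illustrative only: the include path, the file names and the channel construction are placeholders rather than anything taken from the patch series, and the repository's own setup under tests/modules/vardictjava/ remains the canonical reference.

#!/usr/bin/env nextflow

nextflow.enable.dsl = 2

// Assumed include path -- adjust to match where modules/vardictjava/main.nf sits
// relative to your workflow script.
include { VARDICTJAVA } from '../../../modules/vardictjava/main.nf'

workflow test_vardictjava {

    // Placeholder inputs: swap in a real coordinate-sorted BAM/BAI pair,
    // a BED of regions of interest, and a reference FASTA with its .fai index.
    bam_input = [
        [ id:'test', single_end:false ],          // meta map
        file('test.paired_end.sorted.bam'),       // coordinate-sorted BAM
        file('test.paired_end.sorted.bam.bai')    // matching BAM index
    ]

    bed = file('targets.bed')                     // regions of interest

    reference = [
        file('genome.fasta'),                     // reference genome
        file('genome.fasta.fai')                  // samtools faidx index
    ]

    // Call order matches the final input signature:
    //   tuple val(meta), path(bam), path(bai)  /  path(bed)  /  tuple path(fasta), path(fasta_fai)
    // Emits VARDICTJAVA.out.vcf (tuple of meta and *.vcf.gz) and VARDICTJAVA.out.versions
    VARDICTJAVA ( bam_input, bed, reference )
}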